/* Tree inlining.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "cfgloop.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
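
/* As a purely illustrative sketch of the inlining transformation
   described above: for a call "x = callee (y)" where callee is
   "int callee (int p) { return p + 1; }", the inliner conceptually
   produces

       p.1 = y;                <- PARM_DECL remapped to a VAR_DECL
       retval.2 = p.1 + 1;     <- RETURN_EXPR became a MODIFY_EXPR
       x = retval.2;           <- caller reads the returned-value var

   where the temporaries stand for the fresh decls created by the
   remapping machinery below.  */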

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}
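
/* For example (illustrative): once remap_decl records OLD -> NEW
   here, the identity entry NEW -> NEW added above means that a later
   walk which reaches NEW finds it already mapped and leaves it alone,
   rather than copying the copy a second time.  */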

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = (tree *) pointer_map_contains (id->decl_map, val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     in the case the return value is just partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function having an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE in the case we end up extending the
	     lifetime of an SSA name across an abnormal edge, and also
	     increases register pressure.

	     We simply initialize all uninitialized vars by 0 except
	     for the case we are inlining to the very first BB.  We can
	     avoid this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
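
/* Illustrative example of the zero-initialization above: if the
   inlined body uses an uninitialized default definition u_1(D), and
   the inlined copy does not land in the very first basic block, the
   code above emits "u_2 = 0;" into id->entry_bb, so extending the
   lifetime of the uninitialized name cannot leave an SSA name live
   across an abnormal edge.  */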

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
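
/* Illustrative example: for "void f (int n) { int a[n]; ... }" the
   type of A is variably modified, since its TYPE_DOMAIN depends on
   the PARM_DECL N; remap_type_1 therefore rebuilds the ARRAY_TYPE
   with a remapped domain when f's body is copied.  A type such as
   plain "int" is not variably modified and maps to itself via the
   identity entry inserted above.  */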

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
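
/* E.g. a function-local "static int counter;" fails auto_var_in_fn_p
   and is therefore treated as non-local by the caller below: the one
   existing decl is kept (and recorded in BLOCK_NONLOCALIZED_VARS for
   debug info) instead of being duplicated once per inline copy.  */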

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
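
/* Note on the value-expression handling above (illustrative): a VLA
   such as "int a[n]" is represented by a decl whose DECL_VALUE_EXPR
   dereferences a hidden base pointer; walking that expression with
   copy_tree_body_r makes the copied decl refer to the remapped base
   pointer instead of the source function's one.  */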

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or a SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = (tree *) pointer_map_contains (id->decl_map,
					     TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
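
/* Illustrative example of the MEM_REF re-canonicalization above:
   when a caller passes &a for the pointer parameter p, remapping the
   operand of MEM[(int *)p_1] yields MEM[&a], which fold_build2
   simplifies back into a direct reference to "a"; flags such as
   TREE_THIS_VOLATILE are then carried over from the old reference.  */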

/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ??? We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
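
/* Illustrative example of the *& elimination above: if parameter P
   is bound to &x at the call site, an INDIRECT_REF *p in the body
   first becomes *&x via the decl map, and gimple_fold_indirect_ref
   collapses it to plain "x", so the inlined body accesses X
   directly.  */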

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
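
/* E.g. (illustrative) a statement copied from a source-function EH
   region looks up the duplicated eh_region through id->eh_map and is
   retagged with the duplicate's index in the destination function;
   the INTEGER_CST variant above serves places where the region
   number is encoded as a tree argument rather than a plain int.  */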
1226 | /* Helper for copy_bb. Remap statement STMT using the inlining | |
1227 | information in ID. Return the new statement copy. */ | |
1228 | ||
1229 | static gimple | |
1230 | remap_gimple_stmt (gimple stmt, copy_body_data *id) | |
1231 | { | |
1232 | gimple copy = NULL; | |
1233 | struct walk_stmt_info wi; | |
5a6e26b7 | 1234 | bool skip_first = false; |
726a989a RB |
1235 | |
1236 | /* Begin by recognizing trees that we'll completely rewrite for the | |
1237 | inlining context. Our output for these trees is completely | |
1238 | different from out input (e.g. RETURN_EXPR is deleted, and morphs | |
1239 | into an edge). Further down, we'll handle trees that get | |
1240 | duplicated and/or tweaked. */ | |
1241 | ||
1242 | /* When requested, GIMPLE_RETURNs should be transformed to just the | |
1243 | contained GIMPLE_ASSIGN. The branch semantics of the return will | |
1244 | be handled elsewhere by manipulating the CFG rather than the | |
1245 | statement. */ | |
1246 | if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify) | |
1247 | { | |
1248 | tree retval = gimple_return_retval (stmt); | |
1249 | ||
1250 | /* If we're returning something, just turn that into an | |
1251 | assignment into the equivalent of the original RESULT_DECL. | |
1252 | If RETVAL is just the result decl, the result decl has | |
1253 | already been set (e.g. a recent "foo (&result_decl, ...)"); | |
1254 | just toss the entire GIMPLE_RETURN. */ | |
6938f93f JH |
1255 | if (retval |
1256 | && (TREE_CODE (retval) != RESULT_DECL | |
1257 | && (TREE_CODE (retval) != SSA_NAME | |
70b5e7dc | 1258 | || ! SSA_NAME_VAR (retval) |
6938f93f | 1259 | || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL))) |
5a6e26b7 JH |
1260 | { |
1261 | copy = gimple_build_assign (id->retvar, retval); | |
1262 | /* id->retvar is already substituted. Skip it on later remapping. */ | |
1263 | skip_first = true; | |
1264 | } | |
726a989a RB |
1265 | else |
1266 | return gimple_build_nop (); | |
1267 | } | |
1268 | else if (gimple_has_substatements (stmt)) | |
1269 | { | |
1270 | gimple_seq s1, s2; | |
1271 | ||
1272 | /* When cloning bodies from the C++ front end, we will be handed bodies | |
1273 | in High GIMPLE form. Handle here all the High GIMPLE statements that | |
1274 | have embedded statements. */ | |
1275 | switch (gimple_code (stmt)) | |
1276 | { | |
1277 | case GIMPLE_BIND: | |
1278 | copy = copy_gimple_bind (stmt, id); | |
1279 | break; | |
1280 | ||
1281 | case GIMPLE_CATCH: | |
1282 | s1 = remap_gimple_seq (gimple_catch_handler (stmt), id); | |
1283 | copy = gimple_build_catch (gimple_catch_types (stmt), s1); | |
1284 | break; | |
1285 | ||
1286 | case GIMPLE_EH_FILTER: | |
1287 | s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id); | |
1288 | copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1); | |
1289 | break; | |
1290 | ||
1291 | case GIMPLE_TRY: | |
1292 | s1 = remap_gimple_seq (gimple_try_eval (stmt), id); | |
1293 | s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id); | |
b8698a0f | 1294 | copy = gimple_build_try (s1, s2, gimple_try_kind (stmt)); |
726a989a RB |
1295 | break; |
1296 | ||
1297 | case GIMPLE_WITH_CLEANUP_EXPR: | |
1298 | s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id); | |
1299 | copy = gimple_build_wce (s1); | |
1300 | break; | |
1301 | ||
1302 | case GIMPLE_OMP_PARALLEL: | |
1303 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1304 | copy = gimple_build_omp_parallel | |
1305 | (s1, | |
1306 | gimple_omp_parallel_clauses (stmt), | |
1307 | gimple_omp_parallel_child_fn (stmt), | |
1308 | gimple_omp_parallel_data_arg (stmt)); | |
1309 | break; | |
1310 | ||
1311 | case GIMPLE_OMP_TASK: | |
1312 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1313 | copy = gimple_build_omp_task | |
1314 | (s1, | |
1315 | gimple_omp_task_clauses (stmt), | |
1316 | gimple_omp_task_child_fn (stmt), | |
1317 | gimple_omp_task_data_arg (stmt), | |
1318 | gimple_omp_task_copy_fn (stmt), | |
1319 | gimple_omp_task_arg_size (stmt), | |
1320 | gimple_omp_task_arg_align (stmt)); | |
1321 | break; | |
1322 | ||
1323 | case GIMPLE_OMP_FOR: | |
1324 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1325 | s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id); | |
74bf76ed JJ |
1326 | copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt), |
1327 | gimple_omp_for_clauses (stmt), | |
726a989a RB |
1328 | gimple_omp_for_collapse (stmt), s2); |
1329 | { | |
1330 | size_t i; | |
1331 | for (i = 0; i < gimple_omp_for_collapse (stmt); i++) | |
1332 | { | |
1333 | gimple_omp_for_set_index (copy, i, | |
1334 | gimple_omp_for_index (stmt, i)); | |
1335 | gimple_omp_for_set_initial (copy, i, | |
1336 | gimple_omp_for_initial (stmt, i)); | |
1337 | gimple_omp_for_set_final (copy, i, | |
1338 | gimple_omp_for_final (stmt, i)); | |
1339 | gimple_omp_for_set_incr (copy, i, | |
1340 | gimple_omp_for_incr (stmt, i)); | |
1341 | gimple_omp_for_set_cond (copy, i, | |
1342 | gimple_omp_for_cond (stmt, i)); | |
1343 | } | |
1344 | } | |
1345 | break; | |
1346 | ||
1347 | case GIMPLE_OMP_MASTER: | |
1348 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1349 | copy = gimple_build_omp_master (s1); | |
1350 | break; | |
1351 | ||
acf0174b JJ |
1352 | case GIMPLE_OMP_TASKGROUP: |
1353 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1354 | copy = gimple_build_omp_taskgroup (s1); | |
1355 | break; | |
1356 | ||
726a989a RB |
1357 | case GIMPLE_OMP_ORDERED: |
1358 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1359 | copy = gimple_build_omp_ordered (s1); | |
1360 | break; | |
1361 | ||
1362 | case GIMPLE_OMP_SECTION: | |
1363 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1364 | copy = gimple_build_omp_section (s1); | |
1365 | break; | |
1366 | ||
1367 | case GIMPLE_OMP_SECTIONS: | |
1368 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1369 | copy = gimple_build_omp_sections | |
1370 | (s1, gimple_omp_sections_clauses (stmt)); | |
1371 | break; | |
1372 | ||
1373 | case GIMPLE_OMP_SINGLE: | |
1374 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1375 | copy = gimple_build_omp_single | |
1376 | (s1, gimple_omp_single_clauses (stmt)); | |
1377 | break; | |
1378 | ||
acf0174b JJ |
1379 | case GIMPLE_OMP_TARGET: |
1380 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1381 | copy = gimple_build_omp_target | |
1382 | (s1, gimple_omp_target_kind (stmt), | |
1383 | gimple_omp_target_clauses (stmt)); | |
1384 | break; | |
1385 | ||
1386 | case GIMPLE_OMP_TEAMS: | |
1387 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1388 | copy = gimple_build_omp_teams | |
1389 | (s1, gimple_omp_teams_clauses (stmt)); | |
1390 | break; | |
1391 | ||
05a26161 JJ |
1392 | case GIMPLE_OMP_CRITICAL: |
1393 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1394 | copy | |
1395 | = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt)); | |
1396 | break; | |
1397 | ||
0a35513e AH |
1398 | case GIMPLE_TRANSACTION: |
1399 | s1 = remap_gimple_seq (gimple_transaction_body (stmt), id); | |
1400 | copy = gimple_build_transaction (s1, gimple_transaction_label (stmt)); | |
1401 | gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt)); | |
1402 | break; | |
1403 | ||
726a989a RB |
1404 | default: |
1405 | gcc_unreachable (); | |
1406 | } | |
1407 | } | |
1408 | else | |
1409 | { | |
1410 | if (gimple_assign_copy_p (stmt) | |
1411 | && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt) | |
1412 | && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn)) | |
1413 | { | |
1414 | /* Here we handle statements that are not completely rewritten. | |
1415 | First we detect some inlining-induced bogosities for | |
1416 | discarding. */ | |
1417 | ||
1418 | /* Some assignments VAR = VAR; don't generate any rtl code | |
1419 | and thus don't count as variable modification. Avoid | |
1420 | keeping bogosities like 0 = 0. */ | |
1421 | tree decl = gimple_assign_lhs (stmt), value; | |
1422 | tree *n; | |
1423 | ||
1424 | n = (tree *) pointer_map_contains (id->decl_map, decl); | |
1425 | if (n) | |
1426 | { | |
1427 | value = *n; | |
1428 | STRIP_TYPE_NOPS (value); | |
1429 | if (TREE_CONSTANT (value) || TREE_READONLY (value)) | |
1430 | return gimple_build_nop (); | |
1431 | } | |
1432 | } | |
1433 | ||
4029a5e0 JJ |
1434 | /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined | |
1435 | in a block that we aren't copying during tree_function_versioning, | |
1436 | just drop the clobber stmt. */ | |
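	  /* Illustrative sketch (hypothetical SSA names): for

	       *ptr_5 ={v} {CLOBBER};

	     where ptr_5 is defined only in a block outside the copied
	     region, keeping the clobber would leave a use of an SSA name
	     that has no definition in the clone, so the statement is
	     dropped instead.  */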
1437 | if (id->blocks_to_copy && gimple_clobber_p (stmt)) | |
1438 | { | |
1439 | tree lhs = gimple_assign_lhs (stmt); | |
1440 | if (TREE_CODE (lhs) == MEM_REF | |
1441 | && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME) | |
1442 | { | |
1443 | gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)); | |
1444 | if (gimple_bb (def_stmt) | |
1445 | && !bitmap_bit_p (id->blocks_to_copy, | |
1446 | gimple_bb (def_stmt)->index)) | |
1447 | return gimple_build_nop (); | |
1448 | } | |
1449 | } | |
1450 | ||
b5b8b0ac AO |
1451 | if (gimple_debug_bind_p (stmt)) |
1452 | { | |
1453 | copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt), | |
1454 | gimple_debug_bind_get_value (stmt), | |
1455 | stmt); | |
9771b263 | 1456 | id->debug_stmts.safe_push (copy); |
b5b8b0ac AO |
1457 | return copy; |
1458 | } | |
ddb555ed JJ |
1459 | if (gimple_debug_source_bind_p (stmt)) |
1460 | { | |
1461 | copy = gimple_build_debug_source_bind | |
1462 | (gimple_debug_source_bind_get_var (stmt), | |
1463 | gimple_debug_source_bind_get_value (stmt), stmt); | |
9771b263 | 1464 | id->debug_stmts.safe_push (copy); |
ddb555ed JJ |
1465 | return copy; |
1466 | } | |
1d65f45c RH |
1467 | |
1468 | /* Create a new deep copy of the statement. */ | |
1469 | copy = gimple_copy (stmt); | |
1470 | ||
1471 | /* Remap the region numbers for __builtin_eh_{pointer,filter}, | |
1472 | RESX and EH_DISPATCH. */ | |
1473 | if (id->eh_map) | |
1474 | switch (gimple_code (copy)) | |
1475 | { | |
1476 | case GIMPLE_CALL: | |
1477 | { | |
1478 | tree r, fndecl = gimple_call_fndecl (copy); | |
1479 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
1480 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1481 | { | |
1482 | case BUILT_IN_EH_COPY_VALUES: | |
1483 | r = gimple_call_arg (copy, 1); | |
1484 | r = remap_eh_region_tree_nr (r, id); | |
1485 | gimple_call_set_arg (copy, 1, r); | |
1486 | /* FALLTHRU */ | |
1487 | ||
1488 | case BUILT_IN_EH_POINTER: | |
1489 | case BUILT_IN_EH_FILTER: | |
1490 | r = gimple_call_arg (copy, 0); | |
1491 | r = remap_eh_region_tree_nr (r, id); | |
1492 | gimple_call_set_arg (copy, 0, r); | |
1493 | break; | |
1494 | ||
1495 | default: | |
1496 | break; | |
1497 | } | |
d086d311 | 1498 | |
25a6a873 RG |
1499 | /* Reset alias info if we didn't take measures to | |
1500 | keep it valid across inlining by setting DECL_PT_UID. */ | |
1501 | if (!id->src_cfun->gimple_df | |
1502 | || !id->src_cfun->gimple_df->ipa_pta) | |
1503 | gimple_call_reset_alias_info (copy); | |
1d65f45c RH |
1504 | } |
1505 | break; | |
1506 | ||
1507 | case GIMPLE_RESX: | |
1508 | { | |
1509 | int r = gimple_resx_region (copy); | |
1510 | r = remap_eh_region_nr (r, id); | |
1511 | gimple_resx_set_region (copy, r); | |
1512 | } | |
1513 | break; | |
1514 | ||
1515 | case GIMPLE_EH_DISPATCH: | |
1516 | { | |
1517 | int r = gimple_eh_dispatch_region (copy); | |
1518 | r = remap_eh_region_nr (r, id); | |
1519 | gimple_eh_dispatch_set_region (copy, r); | |
1520 | } | |
1521 | break; | |
1522 | ||
1523 | default: | |
1524 | break; | |
1525 | } | |
726a989a RB |
1526 | } |
1527 | ||
1528 | /* If STMT has a block defined, map it to the newly constructed | |
16917761 | 1529 | block. */ |
726a989a RB |
1530 | if (gimple_block (copy)) |
1531 | { | |
1532 | tree *n; | |
1533 | n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy)); | |
1534 | gcc_assert (n); | |
16917761 | 1535 | gimple_set_block (copy, *n); |
726a989a RB |
1536 | } |
1537 | ||
ddb555ed | 1538 | if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy)) |
b5b8b0ac AO |
1539 | return copy; |
1540 | ||
726a989a RB |
1541 | /* Remap all the operands in COPY. */ |
1542 | memset (&wi, 0, sizeof (wi)); | |
1543 | wi.info = id; | |
5a6e26b7 JH |
1544 | if (skip_first) |
1545 | walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL); | |
1546 | else | |
b8698a0f | 1547 | walk_gimple_op (copy, remap_gimple_op_r, &wi); |
726a989a | 1548 | |
5006671f RG |
1549 | /* Clear the copied virtual operands. We are not remapping them here |
1550 | but are going to recreate them from scratch. */ | |
1551 | if (gimple_has_mem_ops (copy)) | |
1552 | { | |
1553 | gimple_set_vdef (copy, NULL_TREE); | |
1554 | gimple_set_vuse (copy, NULL_TREE); | |
1555 | } | |
1556 | ||
726a989a RB |
1557 | return copy; |
1558 | } | |
1559 | ||
1560 | ||
e21aff8a SB |
1561 | /* Copy basic block, scale profile accordingly. Edges will be taken care of |
1562 | later. */ | |
1563 | ||
1564 | static basic_block | |
0178d644 VR |
1565 | copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, |
1566 | gcov_type count_scale) | |
e21aff8a | 1567 | { |
c2a4718a | 1568 | gimple_stmt_iterator gsi, copy_gsi, seq_gsi; |
e21aff8a | 1569 | basic_block copy_basic_block; |
726a989a | 1570 | tree decl; |
0d63a740 | 1571 | gcov_type freq; |
91382288 JH |
1572 | basic_block prev; |
1573 | ||
1574 | /* Search for previous copied basic block. */ | |
1575 | prev = bb->prev_bb; | |
1576 | while (!prev->aux) | |
1577 | prev = prev->prev_bb; | |
e21aff8a SB |
1578 | |
1579 | /* create_basic_block() will append every new block to | |
1580 | basic_block_info automatically. */ | |
cceb1885 | 1581 | copy_basic_block = create_basic_block (NULL, (void *) 0, |
91382288 | 1582 | (basic_block) prev->aux); |
8b47039c | 1583 | copy_basic_block->count = apply_scale (bb->count, count_scale); |
45a80bb9 | 1584 | |
726a989a RB |
1585 | /* We are going to rebuild frequencies from scratch. These values | |
1586 | are of only minor importance; they just drive canonicalize_loop_headers. */ | |
8b47039c | 1587 | freq = apply_scale ((gcov_type)bb->frequency, frequency_scale); |
726a989a | 1588 | |
0d63a740 JH |
1589 | /* We recompute frequencies after inlining, so this is quite safe. */ |
1590 | if (freq > BB_FREQ_MAX) | |
1591 | freq = BB_FREQ_MAX; | |
1592 | copy_basic_block->frequency = freq; | |
e21aff8a | 1593 | |
726a989a RB |
1594 | copy_gsi = gsi_start_bb (copy_basic_block); |
1595 | ||
1596 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
e21aff8a | 1597 | { |
726a989a RB |
1598 | gimple stmt = gsi_stmt (gsi); |
1599 | gimple orig_stmt = stmt; | |
e21aff8a | 1600 | |
416c991f | 1601 | id->regimplify = false; |
726a989a RB |
1602 | stmt = remap_gimple_stmt (stmt, id); |
1603 | if (gimple_nop_p (stmt)) | |
1604 | continue; | |
1605 | ||
1606 | gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt); | |
c2a4718a | 1607 | seq_gsi = copy_gsi; |
726a989a RB |
1608 | |
1609 | /* With return slot optimization we can end up with | |
1610 | non-gimple (foo *)&this->m, fix that here. */ | |
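	  /* Illustrative sketch (hypothetical names): a copied assignment like

	       D.1234 = (struct foo *) &this_2(D)->m;

	     has a NOP_EXPR operand that is not a gimple value; the call to
	     force_gimple_operand_gsi below emits the auxiliary statements
	     needed to gimplify it and substitutes the resulting operand.  */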
4c29307d JJ |
1611 | if (is_gimple_assign (stmt) |
1612 | && gimple_assign_rhs_code (stmt) == NOP_EXPR | |
1613 | && !is_gimple_val (gimple_assign_rhs1 (stmt))) | |
e21aff8a | 1614 | { |
726a989a | 1615 | tree new_rhs; |
c2a4718a | 1616 | new_rhs = force_gimple_operand_gsi (&seq_gsi, |
4a2b7f24 | 1617 | gimple_assign_rhs1 (stmt), |
cf1bcf06 EB |
1618 | true, NULL, false, |
1619 | GSI_CONTINUE_LINKING); | |
726a989a | 1620 | gimple_assign_set_rhs1 (stmt, new_rhs); |
c2a4718a | 1621 | id->regimplify = false; |
726a989a | 1622 | } |
2b65dae5 | 1623 | |
c2a4718a JJ |
1624 | gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT); |
1625 | ||
1626 | if (id->regimplify) | |
1627 | gimple_regimplify_operands (stmt, &seq_gsi); | |
1628 | ||
1629 | /* If copy_basic_block was empty at the start of this iteration, | |
1630 | call gsi_start_bb again to get at the newly added statements. */ | |
1631 | if (gsi_end_p (copy_gsi)) | |
1632 | copy_gsi = gsi_start_bb (copy_basic_block); | |
1633 | else | |
1634 | gsi_next (&copy_gsi); | |
110cfe1c | 1635 | |
726a989a RB |
1636 | /* Process the new statement. The call to gimple_regimplify_operands |
1637 | possibly turned the statement into multiple statements; we | |
1638 | need to process all of them. */ | |
c2a4718a | 1639 | do |
726a989a | 1640 | { |
9187e02d JH |
1641 | tree fn; |
1642 | ||
c2a4718a | 1643 | stmt = gsi_stmt (copy_gsi); |
726a989a RB |
1644 | if (is_gimple_call (stmt) |
1645 | && gimple_call_va_arg_pack_p (stmt) | |
1646 | && id->gimple_call) | |
1647 | { | |
1648 | /* __builtin_va_arg_pack () should be replaced by | |
1649 | all arguments corresponding to ... in the caller. */ | |
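	      /* A minimal sketch (hypothetical functions): given the
		 varargs wrapper

		   static inline int wrap (int x, ...)
		   { return emit (x, __builtin_va_arg_pack ()); }

		 inlining wrap (a, b, c) rewrites the inner call into
		 emit (a, b, c); the code below builds the combined
		 argument vector accordingly.  */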
1650 | tree p; | |
1651 | gimple new_call; | |
9771b263 | 1652 | vec<tree> argarray; |
726a989a RB |
1653 | size_t nargs = gimple_call_num_args (id->gimple_call); |
1654 | size_t n; | |
1655 | ||
910ad8de | 1656 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p)) |
726a989a RB |
1657 | nargs--; |
1658 | ||
1659 | /* Create the new array of arguments. */ | |
1660 | n = nargs + gimple_call_num_args (stmt); | |
9771b263 DN |
1661 | argarray.create (n); |
1662 | argarray.safe_grow_cleared (n); | |
726a989a RB |
1663 | |
1664 | /* Copy all the arguments before '...' */ | |
9771b263 | 1665 | memcpy (argarray.address (), |
726a989a RB |
1666 | gimple_call_arg_ptr (stmt, 0), |
1667 | gimple_call_num_args (stmt) * sizeof (tree)); | |
1668 | ||
1669 | /* Append the arguments passed in '...' */ | |
9771b263 | 1670 | memcpy (argarray.address () + gimple_call_num_args (stmt), |
726a989a RB |
1671 | gimple_call_arg_ptr (id->gimple_call, 0) |
1672 | + (gimple_call_num_args (id->gimple_call) - nargs), | |
1673 | nargs * sizeof (tree)); | |
1674 | ||
1675 | new_call = gimple_build_call_vec (gimple_call_fn (stmt), | |
1676 | argarray); | |
1677 | ||
9771b263 | 1678 | argarray.release (); |
726a989a RB |
1679 | |
1680 | /* Copy all GIMPLE_CALL flags, location and block, except | |
1681 | GF_CALL_VA_ARG_PACK. */ | |
1682 | gimple_call_copy_flags (new_call, stmt); | |
1683 | gimple_call_set_va_arg_pack (new_call, false); | |
1684 | gimple_set_location (new_call, gimple_location (stmt)); | |
1685 | gimple_set_block (new_call, gimple_block (stmt)); | |
1686 | gimple_call_set_lhs (new_call, gimple_call_lhs (stmt)); | |
1687 | ||
1688 | gsi_replace (&copy_gsi, new_call, false); | |
1689 | stmt = new_call; | |
1690 | } | |
1691 | else if (is_gimple_call (stmt) | |
1692 | && id->gimple_call | |
1693 | && (decl = gimple_call_fndecl (stmt)) | |
1694 | && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL | |
1695 | && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN) | |
e0704a46 | 1696 | { |
726a989a RB |
1697 | /* __builtin_va_arg_pack_len () should be replaced by |
1698 | the number of anonymous arguments. */ | |
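	      /* Continuing the sketch above: for wrap (a, b, c), which has
		 one named parameter, __builtin_va_arg_pack_len () is
		 replaced by the constant 2.  */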
1699 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1700 | tree count, p; | |
1701 | gimple new_stmt; | |
1702 | ||
910ad8de | 1703 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p)) |
726a989a RB |
1704 | nargs--; |
1705 | ||
1706 | count = build_int_cst (integer_type_node, nargs); | |
1707 | new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count); | |
1708 | gsi_replace (&copy_gsi, new_stmt, false); | |
1709 | stmt = new_stmt; | |
1710 | } | |
b8a00a4d | 1711 | |
726a989a RB |
1712 | /* Statements produced by inlining can be unfolded, especially | |
1713 | when we have constant propagated some operands. We can't fold | |
1714 | them right now for two reasons: | |
1715 | 1) folding requires SSA_NAME_DEF_STMTs to be correct | |
1716 | 2) we can't change function calls to builtins. | |
1717 | So we just mark the statement for later folding. We mark | |
1718 | all new statements, instead of just the statements changed | |
1719 | by some nontrivial substitution, so that even statements made | |
1720 | foldable indirectly are updated. If this turns out to be | |
1721 | expensive, copy_body can be told to watch for nontrivial | |
1722 | changes. */ | |
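	  /* E.g. (sketch, hypothetical SSA names) a copied statement whose
	     operands were constant propagated, such as  tmp_4 = 5 + 7;,
	     is recorded here and folded to  tmp_4 = 12;  once the SSA web
	     is consistent again.  */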
1723 | if (id->statements_to_fold) | |
1724 | pointer_set_insert (id->statements_to_fold, stmt); | |
1725 | ||
1726 | /* We're duplicating a CALL_EXPR. Find any corresponding | |
1727 | callgraph edges and update or duplicate them. */ | |
1728 | if (is_gimple_call (stmt)) | |
1729 | { | |
9b2a5ef7 | 1730 | struct cgraph_edge *edge; |
f618d33e | 1731 | int flags; |
6ef5231b | 1732 | |
726a989a | 1733 | switch (id->transform_call_graph_edges) |
e0704a46 | 1734 | { |
9b2a5ef7 RH |
1735 | case CB_CGE_DUPLICATE: |
1736 | edge = cgraph_edge (id->src_node, orig_stmt); | |
1737 | if (edge) | |
0d63a740 JH |
1738 | { |
1739 | int edge_freq = edge->frequency; | |
042ae7d2 JH |
1740 | int new_freq; |
1741 | struct cgraph_edge *old_edge = edge; | |
0d63a740 JH |
1742 | edge = cgraph_clone_edge (edge, id->dst_node, stmt, |
1743 | gimple_uid (stmt), | |
1744 | REG_BR_PROB_BASE, CGRAPH_FREQ_BASE, | |
898b8927 | 1745 | true); |
0d63a740 JH |
1746 | /* We could also just rescale the frequency, but |
1747 | doing so would introduce roundoff errors and make | |
1748 | the verifier unhappy. */ | |
67348ccc | 1749 | new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl, |
042ae7d2 JH |
1750 | copy_basic_block); |
1751 | ||
1752 | /* Speculative calls consist of two edges - direct and indirect. | |
1753 | Duplicate the whole thing and distribute frequencies accordingly. */ | |
1754 | if (edge->speculative) | |
0d63a740 | 1755 | { |
042ae7d2 JH |
1756 | struct cgraph_edge *direct, *indirect; |
1757 | struct ipa_ref *ref; | |
1758 | ||
1759 | gcc_assert (!edge->indirect_unknown_callee); | |
1760 | cgraph_speculative_call_info (old_edge, direct, indirect, ref); | |
1761 | indirect = cgraph_clone_edge (indirect, id->dst_node, stmt, | |
1762 | gimple_uid (stmt), | |
1763 | REG_BR_PROB_BASE, CGRAPH_FREQ_BASE, | |
1764 | true); | |
1765 | if (old_edge->frequency + indirect->frequency) | |
1766 | { | |
1767 | edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency, | |
1768 | (old_edge->frequency + indirect->frequency)), | |
1769 | CGRAPH_FREQ_MAX); | |
1770 | indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency, | |
1771 | (old_edge->frequency + indirect->frequency)), | |
1772 | CGRAPH_FREQ_MAX); | |
1773 | } | |
67348ccc | 1774 | ipa_clone_ref (ref, id->dst_node, stmt); |
042ae7d2 JH |
1775 | } |
1776 | else | |
1777 | { | |
1778 | edge->frequency = new_freq; | |
1779 | if (dump_file | |
1780 | && profile_status_for_function (cfun) != PROFILE_ABSENT | |
1781 | && (edge_freq > edge->frequency + 10 | |
1782 | || edge_freq < edge->frequency - 10)) | |
1783 | { | |
1784 | fprintf (dump_file, "Edge frequency estimated by " | |
1785 | "cgraph %i diverge from inliner's estimate %i\n", | |
1786 | edge_freq, | |
1787 | edge->frequency); | |
1788 | fprintf (dump_file, | |
1789 | "Orig bb: %i, orig bb freq %i, new bb freq %i\n", | |
1790 | bb->index, | |
1791 | bb->frequency, | |
1792 | copy_basic_block->frequency); | |
1793 | } | |
0d63a740 JH |
1794 | } |
1795 | } | |
9b2a5ef7 RH |
1796 | break; |
1797 | ||
1798 | case CB_CGE_MOVE_CLONES: | |
1799 | cgraph_set_call_stmt_including_clones (id->dst_node, | |
1800 | orig_stmt, stmt); | |
1801 | edge = cgraph_edge (id->dst_node, stmt); | |
1802 | break; | |
1803 | ||
1804 | case CB_CGE_MOVE: | |
1805 | edge = cgraph_edge (id->dst_node, orig_stmt); | |
1806 | if (edge) | |
1807 | cgraph_set_call_stmt (edge, stmt); | |
1808 | break; | |
1809 | ||
1810 | default: | |
1811 | gcc_unreachable (); | |
110cfe1c | 1812 | } |
f618d33e | 1813 | |
9b2a5ef7 RH |
1814 | /* Constant propagation on arguments done during inlining | |
1815 | may create a new direct call. Produce an edge for it. */ | |
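	      /* Sketch with hypothetical names: if the body contains the
		 indirect call  (*fp_3) (x)  and inlining substituted fp_3
		 with &bar, the copied statement is the direct call
		 bar (x), which needs a callgraph edge of its own.  */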
b8698a0f | 1816 | if ((!edge |
e33c6cd6 | 1817 | || (edge->indirect_inlining_edge |
9b2a5ef7 | 1818 | && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)) |
67348ccc | 1819 | && id->dst_node->definition |
9b2a5ef7 RH |
1820 | && (fn = gimple_call_fndecl (stmt)) != NULL) |
1821 | { | |
581985d7 | 1822 | struct cgraph_node *dest = cgraph_get_node (fn); |
9b2a5ef7 RH |
1823 | |
1824 | /* We have a missing edge in the callgraph. This can happen | |
1825 | when previous inlining turned an indirect call into a | |
0e3776db | 1826 | direct call by constant propagating arguments, or we are | |
20a6bb58 | 1827 | producing a dead clone (for further cloning). In all | |
9b2a5ef7 RH |
1828 | other cases we hit a bug (incorrect node sharing is the | |
1829 | most common reason for missing edges). */ | |
67348ccc DM |
1830 | gcc_assert (!dest->definition |
1831 | || dest->address_taken | |
1832 | || !id->src_node->definition | |
1833 | || !id->dst_node->definition); | |
9b2a5ef7 RH |
1834 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES) |
1835 | cgraph_create_edge_including_clones | |
47cb0d7d | 1836 | (id->dst_node, dest, orig_stmt, stmt, bb->count, |
67348ccc | 1837 | compute_call_stmt_bb_frequency (id->dst_node->decl, |
0d63a740 | 1838 | copy_basic_block), |
898b8927 | 1839 | CIF_ORIGINALLY_INDIRECT_CALL); |
9b2a5ef7 RH |
1840 | else |
1841 | cgraph_create_edge (id->dst_node, dest, stmt, | |
47cb0d7d JH |
1842 | bb->count, |
1843 | compute_call_stmt_bb_frequency | |
67348ccc | 1844 | (id->dst_node->decl, |
960bfb69 | 1845 | copy_basic_block))->inline_failed |
9b2a5ef7 RH |
1846 | = CIF_ORIGINALLY_INDIRECT_CALL; |
1847 | if (dump_file) | |
1848 | { | |
91382288 | 1849 | fprintf (dump_file, "Created new direct edge to %s\n", |
9b2a5ef7 RH |
1850 | cgraph_node_name (dest)); |
1851 | } | |
1852 | } | |
9187e02d | 1853 | |
f618d33e | 1854 | flags = gimple_call_flags (stmt); |
f618d33e MJ |
1855 | if (flags & ECF_MAY_BE_ALLOCA) |
1856 | cfun->calls_alloca = true; | |
1857 | if (flags & ECF_RETURNS_TWICE) | |
1858 | cfun->calls_setjmp = true; | |
726a989a | 1859 | } |
e21aff8a | 1860 | |
1d65f45c RH |
1861 | maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt, |
1862 | id->eh_map, id->eh_lp_nr); | |
726a989a | 1863 | |
b5b8b0ac | 1864 | if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt)) |
726a989a RB |
1865 | { |
1866 | ssa_op_iter i; | |
1867 | tree def; | |
1868 | ||
726a989a RB |
1869 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF) |
1870 | if (TREE_CODE (def) == SSA_NAME) | |
1871 | SSA_NAME_DEF_STMT (def) = stmt; | |
1872 | } | |
1873 | ||
1874 | gsi_next (&copy_gsi); | |
e21aff8a | 1875 | } |
c2a4718a | 1876 | while (!gsi_end_p (copy_gsi)); |
726a989a RB |
1877 | |
1878 | copy_gsi = gsi_last_bb (copy_basic_block); | |
e21aff8a | 1879 | } |
726a989a | 1880 | |
e21aff8a SB |
1881 | return copy_basic_block; |
1882 | } | |
1883 | ||
110cfe1c JH |
1884 | /* Inserting a Single Entry Multiple Exit region in SSA form into code in | |
1885 | SSA form is quite easy, since the dominator relationship for the old | |
1886 | basic blocks does not change. | |
1887 | ||
1888 | There is, however, an exception: inlining might change the dominator | |
1889 | relation across EH edges from basic blocks within the inlined function | |
5305a4cb | 1890 | to landing pads in the function we inline into. | |
110cfe1c | 1891 | ||
e9705dc5 AO |
1892 | The function fills in PHI_RESULTs of such PHI nodes if they refer | |
1893 | to gimple regs. Otherwise, the function marks the PHI_RESULT of such | |
1894 | PHI nodes for renaming. For non-gimple regs, renaming is safe: the | |
1895 | EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be | |
110cfe1c JH |
1897 | set, and this means that there will be no overlapping live ranges | |
1898 | for the underlying symbol. | |
1899 | ||
1900 | This might change in the future if we allow redirecting of EH edges, | |
1901 | and we might then want to change the way we build the CFG pre-inlining | |
1902 | to include all the possible edges. */ | |
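/* Illustrative sketch (hypothetical SSA names): if a landing pad in the
   function we inline into has

     x_1 = PHI <x_2 (re), ...>

   where RE is the pre-existing edge from RET_BB, then a newly added EH or
   abnormal edge E into the same landing pad receives the argument already
   present on RE; this is what the SET_USE below implements.  */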
1902 | static void | |
e9705dc5 AO |
1903 | update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb, |
1904 | bool can_throw, bool nonlocal_goto) | |
110cfe1c JH |
1905 | { |
1906 | edge e; | |
1907 | edge_iterator ei; | |
1908 | ||
1909 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1910 | if (!e->dest->aux | |
1911 | || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK) | |
1912 | { | |
726a989a RB |
1913 | gimple phi; |
1914 | gimple_stmt_iterator si; | |
110cfe1c | 1915 | |
e9705dc5 AO |
1916 | if (!nonlocal_goto) |
1917 | gcc_assert (e->flags & EDGE_EH); | |
726a989a | 1918 | |
e9705dc5 AO |
1919 | if (!can_throw) |
1920 | gcc_assert (!(e->flags & EDGE_EH)); | |
726a989a RB |
1921 | |
1922 | for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si)) | |
110cfe1c | 1923 | { |
e9705dc5 AO |
1924 | edge re; |
1925 | ||
726a989a RB |
1926 | phi = gsi_stmt (si); |
1927 | ||
3f8825c0 RB |
1928 | /* For abnormal goto/call edges the receiver can be the |
1929 | ENTRY_BLOCK. Do not assert this cannot happen. */ | |
e9705dc5 | 1930 | |
496a4ef5 JH |
1931 | gcc_assert ((e->flags & EDGE_EH) |
1932 | || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi))); | |
e9705dc5 | 1933 | |
e9705dc5 | 1934 | re = find_edge (ret_bb, e->dest); |
0107dca2 | 1935 | gcc_checking_assert (re); |
e9705dc5 AO |
1936 | gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL)) |
1937 | == (e->flags & (EDGE_EH | EDGE_ABNORMAL))); | |
1938 | ||
1939 | SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), | |
1940 | USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re))); | |
110cfe1c JH |
1941 | } |
1942 | } | |
1943 | } | |
1944 | ||
726a989a | 1945 | |
128a79fb KH |
1946 | /* Copy edges from BB into its copy constructed earlier, scaling the profile | |
1947 | accordingly. Edges will be taken care of later. Assume the aux | |
90a7788b JJ |
1948 | pointers point to the copies of each BB. Return true if any | |
1949 | debug stmts are left after a statement that must end the basic block. */ | |
726a989a | 1950 | |
90a7788b | 1951 | static bool |
92e776e9 RB |
1952 | copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb, |
1953 | bool can_make_abnormal_goto) | |
e21aff8a | 1954 | { |
cceb1885 | 1955 | basic_block new_bb = (basic_block) bb->aux; |
e21aff8a SB |
1956 | edge_iterator ei; |
1957 | edge old_edge; | |
726a989a | 1958 | gimple_stmt_iterator si; |
e21aff8a | 1959 | int flags; |
90a7788b | 1960 | bool need_debug_cleanup = false; |
e21aff8a SB |
1961 | |
1962 | /* Use the indices from the original blocks to create edges for the | |
1963 | new ones. */ | |
1964 | FOR_EACH_EDGE (old_edge, ei, bb->succs) | |
e0704a46 JH |
1965 | if (!(old_edge->flags & EDGE_EH)) |
1966 | { | |
82d6e6fc | 1967 | edge new_edge; |
e21aff8a | 1968 | |
e0704a46 | 1969 | flags = old_edge->flags; |
e21aff8a | 1970 | |
e0704a46 JH |
1971 | /* Return edges do get a FALLTHRU flag when they get inlined. */ | |
1972 | if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags | |
1973 | && old_edge->dest->aux != EXIT_BLOCK_PTR) | |
1974 | flags |= EDGE_FALLTHRU; | |
82d6e6fc | 1975 | new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags); |
8b47039c | 1976 | new_edge->count = apply_scale (old_edge->count, count_scale); |
82d6e6fc | 1977 | new_edge->probability = old_edge->probability; |
e0704a46 | 1978 | } |
e21aff8a SB |
1979 | |
1980 | if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK) | |
90a7788b | 1981 | return false; |
e21aff8a | 1982 | |
726a989a | 1983 | for (si = gsi_start_bb (new_bb); !gsi_end_p (si);) |
e21aff8a | 1984 | { |
726a989a | 1985 | gimple copy_stmt; |
e9705dc5 | 1986 | bool can_throw, nonlocal_goto; |
e21aff8a | 1987 | |
726a989a | 1988 | copy_stmt = gsi_stmt (si); |
b5b8b0ac | 1989 | if (!is_gimple_debug (copy_stmt)) |
f9a21e13 | 1990 | update_stmt (copy_stmt); |
726a989a | 1991 | |
e21aff8a | 1992 | /* Do this before the possible split_block. */ |
726a989a | 1993 | gsi_next (&si); |
e21aff8a SB |
1994 | |
1995 | /* If this tree could throw an exception, there are two | |
1996 | cases where we need to add abnormal edge(s): the | |
1997 | tree wasn't in a region and there is a "current | |
1998 | region" in the caller; or the original tree had | |
1999 | EH edges. In both cases split the block after the tree, | |
2000 | and add abnormal edge(s) as needed; we need both | |
2001 | those from the callee and the caller. | |
2002 | We check whether the copy can throw, because the const | |
2003 | propagation can change an INDIRECT_REF which throws | |
2004 | into a COMPONENT_REF which doesn't. If the copy | |
2005 | can throw, the original could also throw. */ | |
726a989a | 2006 | can_throw = stmt_can_throw_internal (copy_stmt); |
a6f30e66 | 2007 | nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt); |
e9705dc5 AO |
2008 | |
2009 | if (can_throw || nonlocal_goto) | |
e21aff8a | 2010 | { |
90a7788b JJ |
2011 | if (!gsi_end_p (si)) |
2012 | { | |
2013 | while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si))) | |
2014 | gsi_next (&si); | |
2015 | if (gsi_end_p (si)) | |
2016 | need_debug_cleanup = true; | |
2017 | } | |
726a989a | 2018 | if (!gsi_end_p (si)) |
e21aff8a SB |
2019 | /* Note that bb's predecessor edges aren't necessarily |
2020 | right at this point; split_block doesn't care. */ | |
2021 | { | |
2022 | edge e = split_block (new_bb, copy_stmt); | |
110cfe1c | 2023 | |
e21aff8a | 2024 | new_bb = e->dest; |
110cfe1c | 2025 | new_bb->aux = e->src->aux; |
726a989a | 2026 | si = gsi_start_bb (new_bb); |
e21aff8a | 2027 | } |
e9705dc5 | 2028 | } |
e21aff8a | 2029 | |
1d65f45c RH |
2030 | if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH) |
2031 | make_eh_dispatch_edges (copy_stmt); | |
2032 | else if (can_throw) | |
e9705dc5 | 2033 | make_eh_edges (copy_stmt); |
110cfe1c | 2034 | |
a6f30e66 RB |
2035 | /* If the call we inline cannot make an abnormal goto, do not add | |
2036 | additional abnormal edges but only retain those already present | |
2037 | in the original function body. */ | |
2038 | nonlocal_goto &= can_make_abnormal_goto; | |
e9705dc5 | 2039 | if (nonlocal_goto) |
726a989a | 2040 | make_abnormal_goto_edges (gimple_bb (copy_stmt), true); |
e9705dc5 AO |
2041 | |
2042 | if ((can_throw || nonlocal_goto) | |
2043 | && gimple_in_ssa_p (cfun)) | |
726a989a | 2044 | update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb, |
e9705dc5 | 2045 | can_throw, nonlocal_goto); |
110cfe1c | 2046 | } |
90a7788b | 2047 | return need_debug_cleanup; |
110cfe1c JH |
2048 | } |
2049 | ||
2050 | /* Copy the PHIs. All blocks and edges are copied, some blocks | |
2051 | were possibly split and new outgoing EH edges inserted. | |
2052 | BB points to the block of the original function and AUX pointers link | |
2053 | the original and newly copied blocks. */ | |
2054 | ||
2055 | static void | |
2056 | copy_phis_for_bb (basic_block bb, copy_body_data *id) | |
2057 | { | |
3d9a9f94 | 2058 | basic_block const new_bb = (basic_block) bb->aux; |
110cfe1c | 2059 | edge_iterator ei; |
726a989a RB |
2060 | gimple phi; |
2061 | gimple_stmt_iterator si; | |
6a78fd06 RG |
2062 | edge new_edge; |
2063 | bool inserted = false; | |
110cfe1c | 2064 | |
355a7673 | 2065 | for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si)) |
110cfe1c | 2066 | { |
726a989a RB |
2067 | tree res, new_res; |
2068 | gimple new_phi; | |
110cfe1c | 2069 | |
726a989a RB |
2070 | phi = gsi_stmt (si); |
2071 | res = PHI_RESULT (phi); | |
2072 | new_res = res; | |
ea057359 | 2073 | if (!virtual_operand_p (res)) |
110cfe1c | 2074 | { |
726a989a | 2075 | walk_tree (&new_res, copy_tree_body_r, id, NULL); |
dcc748dd | 2076 | new_phi = create_phi_node (new_res, new_bb); |
110cfe1c JH |
2077 | FOR_EACH_EDGE (new_edge, ei, new_bb->preds) |
2078 | { | |
8b3057b3 JH |
2079 | edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb); |
2080 | tree arg; | |
2081 | tree new_arg; | |
8b3057b3 | 2082 | edge_iterator ei2; |
5368224f | 2083 | location_t locus; |
8b3057b3 | 2084 | |
20a6bb58 | 2085 | /* When doing partial cloning, we allow PHIs on the entry block | |
8b3057b3 JH |
2086 | as long as all the arguments are the same. Find any input | |
2087 | edge to see the argument to copy. */ | |
2088 | if (!old_edge) | |
2089 | FOR_EACH_EDGE (old_edge, ei2, bb->preds) | |
2090 | if (!old_edge->src->aux) | |
2091 | break; | |
2092 | ||
2093 | arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge); | |
2094 | new_arg = arg; | |
726a989a | 2095 | walk_tree (&new_arg, copy_tree_body_r, id, NULL); |
110cfe1c | 2096 | gcc_assert (new_arg); |
36b6e793 JJ |
2097 | /* With return slot optimization we can end up with |
2098 | non-gimple (foo *)&this->m, fix that here. */ | |
2099 | if (TREE_CODE (new_arg) != SSA_NAME | |
2100 | && TREE_CODE (new_arg) != FUNCTION_DECL | |
2101 | && !is_gimple_val (new_arg)) | |
2102 | { | |
726a989a RB |
2103 | gimple_seq stmts = NULL; |
2104 | new_arg = force_gimple_operand (new_arg, &stmts, true, NULL); | |
6a78fd06 RG |
2105 | gsi_insert_seq_on_edge (new_edge, stmts); |
2106 | inserted = true; | |
36b6e793 | 2107 | } |
5368224f | 2108 | locus = gimple_phi_arg_location_from_edge (phi, old_edge); |
5368224f DC |
2109 | if (LOCATION_BLOCK (locus)) |
2110 | { | |
2111 | tree *n; | |
2112 | n = (tree *) pointer_map_contains (id->decl_map, | |
2113 | LOCATION_BLOCK (locus)); | |
2114 | gcc_assert (n); | |
ef6179d1 DC |
2115 | if (*n) |
2116 | locus = COMBINE_LOCATION_DATA (line_table, locus, *n); | |
2117 | else | |
2118 | locus = LOCATION_LOCUS (locus); | |
5368224f | 2119 | } |
16917761 RB |
2120 | else |
2121 | locus = LOCATION_LOCUS (locus); | |
5368224f | 2122 | |
16917761 | 2123 | add_phi_arg (new_phi, new_arg, new_edge, locus); |
110cfe1c | 2124 | } |
e21aff8a SB |
2125 | } |
2126 | } | |
6a78fd06 RG |
2127 | |
2128 | /* Commit the delayed edge insertions. */ | |
2129 | if (inserted) | |
2130 | FOR_EACH_EDGE (new_edge, ei, new_bb->preds) | |
2131 | gsi_commit_one_edge_insert (new_edge, NULL); | |
e21aff8a SB |
2132 | } |
2133 | ||
726a989a | 2134 | |
e21aff8a | 2135 | /* Wrapper for remap_decl so it can be used as a callback. */ |
726a989a | 2136 | |
e21aff8a SB |
2137 | static tree |
2138 | remap_decl_1 (tree decl, void *data) | |
2139 | { | |
1b369fae | 2140 | return remap_decl (decl, (copy_body_data *) data); |
e21aff8a SB |
2141 | } |
2142 | ||
110cfe1c | 2143 | /* Build the struct function and associated data structures for the new clone | |
af16bc76 MJ |
2144 | NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes | |
2145 | cfun to the function of new_fndecl (and current_function_decl too). */ | |
110cfe1c JH |
2146 | |
2147 | static void | |
0d63a740 | 2148 | initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count) |
110cfe1c | 2149 | { |
110cfe1c | 2150 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
0d63a740 | 2151 | gcov_type count_scale; |
110cfe1c | 2152 | |
49bde175 JH |
2153 | if (!DECL_ARGUMENTS (new_fndecl)) |
2154 | DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl); | |
2155 | if (!DECL_RESULT (new_fndecl)) | |
2156 | DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl); | |
2157 | ||
110cfe1c | 2158 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) |
8b47039c TJ |
2159 | count_scale |
2160 | = GCOV_COMPUTE_SCALE (count, | |
2161 | ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); | |
110cfe1c | 2162 | else |
0d63a740 | 2163 | count_scale = REG_BR_PROB_BASE; |
110cfe1c JH |
2164 | |
2165 | /* Register specific tree functions. */ | |
726a989a | 2166 | gimple_register_cfg_hooks (); |
39ecc018 JH |
2167 | |
2168 | /* Get clean struct function. */ | |
2169 | push_struct_function (new_fndecl); | |
2170 | ||
2171 | /* We will rebuild these, so just sanity check that they are empty. */ | |
2172 | gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL); | |
2173 | gcc_assert (cfun->local_decls == NULL); | |
2174 | gcc_assert (cfun->cfg == NULL); | |
2175 | gcc_assert (cfun->decl == new_fndecl); | |
2176 | ||
20a6bb58 | 2177 | /* Copy items we preserve during cloning. */ |
39ecc018 JH |
2178 | cfun->static_chain_decl = src_cfun->static_chain_decl; |
2179 | cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area; | |
2180 | cfun->function_end_locus = src_cfun->function_end_locus; | |
a9e0d843 | 2181 | cfun->curr_properties = src_cfun->curr_properties; |
39ecc018 | 2182 | cfun->last_verified = src_cfun->last_verified; |
39ecc018 JH |
2183 | cfun->va_list_gpr_size = src_cfun->va_list_gpr_size; |
2184 | cfun->va_list_fpr_size = src_cfun->va_list_fpr_size; | |
39ecc018 JH |
2185 | cfun->has_nonlocal_label = src_cfun->has_nonlocal_label; |
2186 | cfun->stdarg = src_cfun->stdarg; | |
39ecc018 | 2187 | cfun->after_inlining = src_cfun->after_inlining; |
8f4f502f EB |
2188 | cfun->can_throw_non_call_exceptions |
2189 | = src_cfun->can_throw_non_call_exceptions; | |
9510c5af | 2190 | cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions; |
39ecc018 JH |
2191 | cfun->returns_struct = src_cfun->returns_struct; |
2192 | cfun->returns_pcc_struct = src_cfun->returns_pcc_struct; | |
39ecc018 | 2193 | |
110cfe1c JH |
2194 | init_empty_tree_cfg (); |
2195 | ||
0d63a740 | 2196 | profile_status_for_function (cfun) = profile_status_for_function (src_cfun); |
110cfe1c JH |
2197 | ENTRY_BLOCK_PTR->count = |
2198 | (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2199 | REG_BR_PROB_BASE); | |
0d63a740 JH |
2200 | ENTRY_BLOCK_PTR->frequency |
2201 | = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency; | |
110cfe1c JH |
2202 | EXIT_BLOCK_PTR->count = |
2203 | (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2204 | REG_BR_PROB_BASE); | |
2205 | EXIT_BLOCK_PTR->frequency = | |
0d63a740 | 2206 | EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency; |
110cfe1c JH |
2207 | if (src_cfun->eh) |
2208 | init_eh_for_function (); | |
2209 | ||
2210 | if (src_cfun->gimple_df) | |
2211 | { | |
5db9ba0c | 2212 | init_tree_ssa (cfun); |
110cfe1c | 2213 | cfun->gimple_df->in_ssa_p = true; |
3828719a | 2214 | init_ssa_operands (cfun); |
110cfe1c | 2215 | } |
110cfe1c JH |
2216 | } |
2217 | ||
90a7788b JJ |
2218 | /* Helper function for copy_cfg_body. Move debug stmts from the end |
2219 | of NEW_BB to the beginning of successor basic blocks when needed. If the | |
2220 | successor has multiple predecessors, reset the values; otherwise keep | |
2221 | them. */ | |
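/* Illustrative sketch (hypothetical names): after

     x_1 = may_throw ();
     # DEBUG x => x_1

   splitting the block leaves the debug bind after the statement that must
   end the block; it is moved to the start of each successor, and its value
   is reset whenever a successor has several predecessors, since the
   binding would no longer be unambiguous there.  */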
2222 | ||
2223 | static void | |
2224 | maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb) | |
2225 | { | |
2226 | edge e; | |
2227 | edge_iterator ei; | |
2228 | gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb); | |
2229 | ||
2230 | if (gsi_end_p (si) | |
2231 | || gsi_one_before_end_p (si) | |
2232 | || !(stmt_can_throw_internal (gsi_stmt (si)) | |
2233 | || stmt_can_make_abnormal_goto (gsi_stmt (si)))) | |
2234 | return; | |
2235 | ||
2236 | FOR_EACH_EDGE (e, ei, new_bb->succs) | |
2237 | { | |
2238 | gimple_stmt_iterator ssi = gsi_last_bb (new_bb); | |
2239 | gimple_stmt_iterator dsi = gsi_after_labels (e->dest); | |
2240 | while (is_gimple_debug (gsi_stmt (ssi))) | |
2241 | { | |
2242 | gimple stmt = gsi_stmt (ssi), new_stmt; | |
2243 | tree var; | |
2244 | tree value; | |
2245 | ||
2246 | /* For the last edge move the debug stmts instead of copying | |
2247 | them. */ | |
2248 | if (ei_one_before_end_p (ei)) | |
2249 | { | |
2250 | si = ssi; | |
2251 | gsi_prev (&ssi); | |
ddb555ed | 2252 | if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt)) |
90a7788b JJ |
2253 | gimple_debug_bind_reset_value (stmt); |
2254 | gsi_remove (&si, false); | |
2255 | gsi_insert_before (&dsi, stmt, GSI_SAME_STMT); | |
2256 | continue; | |
2257 | } | |
2258 | ||
ddb555ed | 2259 | if (gimple_debug_bind_p (stmt)) |
90a7788b | 2260 | { |
ddb555ed JJ |
2261 | var = gimple_debug_bind_get_var (stmt); |
2262 | if (single_pred_p (e->dest)) | |
2263 | { | |
2264 | value = gimple_debug_bind_get_value (stmt); | |
2265 | value = unshare_expr (value); | |
2266 | } | |
2267 | else | |
2268 | value = NULL_TREE; | |
2269 | new_stmt = gimple_build_debug_bind (var, value, stmt); | |
2270 | } | |
2271 | else if (gimple_debug_source_bind_p (stmt)) | |
2272 | { | |
2273 | var = gimple_debug_source_bind_get_var (stmt); | |
2274 | value = gimple_debug_source_bind_get_value (stmt); | |
2275 | new_stmt = gimple_build_debug_source_bind (var, value, stmt); | |
90a7788b JJ |
2276 | } |
2277 | else | |
ddb555ed | 2278 | gcc_unreachable (); |
90a7788b | 2279 | gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT); |
9771b263 | 2280 | id->debug_stmts.safe_push (new_stmt); |
90a7788b JJ |
2281 | gsi_prev (&ssi); |
2282 | } | |
2283 | } | |
2284 | } | |
2285 | ||
a9e0d843 RB |
2286 | /* Make a copy of the sub-loops of SRC_PARENT and place them |
2287 | as children of DEST_PARENT. */ | |
2288 | ||
2289 | static void | |
f3b331d1 | 2290 | copy_loops (copy_body_data *id, |
a9e0d843 RB |
2291 | struct loop *dest_parent, struct loop *src_parent) |
2292 | { | |
2293 | struct loop *src_loop = src_parent->inner; | |
2294 | while (src_loop) | |
2295 | { | |
f3b331d1 JJ |
2296 | if (!id->blocks_to_copy |
2297 | || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index)) | |
a9e0d843 RB |
2298 | { |
2299 | struct loop *dest_loop = alloc_loop (); | |
2300 | ||
2301 | /* Assign the new loop its header and latch and associate | |
2302 | those with the new loop. */ | |
9f8e7a96 RB |
2303 | if (src_loop->header != NULL) |
2304 | { | |
2305 | dest_loop->header = (basic_block)src_loop->header->aux; | |
2306 | dest_loop->header->loop_father = dest_loop; | |
2307 | } | |
a9e0d843 RB |
2308 | if (src_loop->latch != NULL) |
2309 | { | |
2310 | dest_loop->latch = (basic_block)src_loop->latch->aux; | |
2311 | dest_loop->latch->loop_father = dest_loop; | |
2312 | } | |
2313 | ||
2314 | /* Copy loop meta-data. */ | |
2315 | copy_loop_info (src_loop, dest_loop); | |
2316 | ||
2317 | /* Finally place it into the loop array and the loop tree. */ | |
0fc822d0 | 2318 | place_new_loop (cfun, dest_loop); |
a9e0d843 RB |
2319 | flow_loop_tree_node_add (dest_parent, dest_loop); |
2320 | ||
f3b331d1 JJ |
2321 | if (src_loop->simduid) |
2322 | { | |
2323 | dest_loop->simduid = remap_decl (src_loop->simduid, id); | |
2324 | cfun->has_simduid_loops = true; | |
2325 | } | |
2326 | if (src_loop->force_vect) | |
2327 | { | |
2328 | dest_loop->force_vect = true; | |
2329 | cfun->has_force_vect_loops = true; | |
2330 | } | |
2331 | ||
a9e0d843 | 2332 | /* Recurse. */ |
f3b331d1 | 2333 | copy_loops (id, dest_loop, src_loop); |
a9e0d843 RB |
2334 | } |
2335 | src_loop = src_loop->next; | |
2336 | } | |
2337 | } | |
2338 | ||
042ae7d2 JH |
2339 | /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */ | |
2340 | ||
2341 | void | |
2342 | redirect_all_calls (copy_body_data * id, basic_block bb) | |
2343 | { | |
2344 | gimple_stmt_iterator si; | |
2345 | for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si)) | |
2346 | { | |
2347 | if (is_gimple_call (gsi_stmt (si))) | |
2348 | { | |
2349 | struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si)); | |
2350 | if (edge) | |
2351 | cgraph_redirect_edge_call_stmt_to_callee (edge); | |
2352 | } | |
2353 | } | |
2354 | } | |
2355 | ||
eb4b92c1 TJ |
2356 | /* Convert estimated frequencies into counts for NODE, scaling COUNT |
2357 | with each bb's frequency. Used when NODE has a 0-weight entry | |
2358 | but we are about to inline it into a non-zero count call bb. | |
2359 | See the comments for handle_missing_profiles() in predict.c for | |
2360 | when this can happen for COMDATs. */ | |
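/* Worked example (sketch): with COUNT = 1000 and a block whose frequency
   is BB_FREQ_MAX / 2, GCOV_COMPUTE_SCALE yields REG_BR_PROB_BASE / 2, so
   apply_scale assigns the block a count of 1000 / 2 = 500.  */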
2361 | ||
2362 | void | |
2363 | freqs_to_counts (struct cgraph_node *node, gcov_type count) | |
2364 | { | |
2365 | basic_block bb; | |
2366 | edge_iterator ei; | |
2367 | edge e; | |
2368 | struct function *fn = DECL_STRUCT_FUNCTION (node->decl); | |
2369 | ||
2370 | FOR_ALL_BB_FN (bb, fn) | |
2371 | { | |
2372 | bb->count = apply_scale (count, | |
2373 | GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX)); | |
2374 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2375 | e->count = apply_probability (e->src->count, e->probability); | |
2376 | } | |
2377 | } | |
2378 | ||
e21aff8a SB |
2379 | /* Make a copy of the body of FN so that it can be inserted inline in |
2380 | another function. Walks FN via CFG, returns new fndecl. */ | |
2381 | ||
2382 | static tree | |
0d63a740 | 2383 | copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale, |
91382288 | 2384 | basic_block entry_block_map, basic_block exit_block_map, |
f3b331d1 | 2385 | basic_block new_entry) |
e21aff8a | 2386 | { |
1b369fae | 2387 | tree callee_fndecl = id->src_fn; |
e21aff8a | 2388 | /* Original cfun for the callee, doesn't change. */ |
1b369fae | 2389 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
110cfe1c | 2390 | struct function *cfun_to_copy; |
e21aff8a SB |
2391 | basic_block bb; |
2392 | tree new_fndecl = NULL; | |
90a7788b | 2393 | bool need_debug_cleanup = false; |
0d63a740 | 2394 | gcov_type count_scale; |
110cfe1c | 2395 | int last; |
20a6bb58 JH |
2396 | int incoming_frequency = 0; |
2397 | gcov_type incoming_count = 0; | |
e21aff8a | 2398 | |
eb4b92c1 TJ |
2399 | /* This can happen for COMDAT routines that end up with 0 counts |
2400 | despite being called (see the comments for handle_missing_profiles() | |
2401 | in predict.c as to why). Apply counts to the blocks in the callee | |
2402 | before inlining, using the guessed edge frequencies, so that we don't | |
2403 | end up with a 0-count inline body which can confuse downstream | |
2404 | optimizations such as function splitting. */ | |
2405 | if (!ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count && count) | |
2406 | { | |
2407 | /* Apply the larger of the call bb count and the total incoming | |
2408 | call edge count to the callee. */ | |
2409 | gcov_type in_count = 0; | |
2410 | struct cgraph_edge *in_edge; | |
2411 | for (in_edge = id->src_node->callers; in_edge; | |
2412 | in_edge = in_edge->next_caller) | |
2413 | in_count += in_edge->count; | |
2414 | freqs_to_counts (id->src_node, count > in_count ? count : in_count); | |
2415 | } | |
2416 | ||
1b369fae | 2417 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) |
8b47039c TJ |
2418 | count_scale |
2419 | = GCOV_COMPUTE_SCALE (count, | |
2420 | ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); | |
e21aff8a | 2421 | else |
0d63a740 | 2422 | count_scale = REG_BR_PROB_BASE; |
e21aff8a SB |
2423 | |
2424 | /* Register specific tree functions. */ | |
726a989a | 2425 | gimple_register_cfg_hooks (); |
e21aff8a | 2426 | |
b35366ce JH |
2427 | /* If we are inlining just a region of the function, make sure to connect the | |
2428 | new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we | |
2429 | must compute the frequency and probability of ENTRY_BLOCK_PTR based on the | |
20a6bb58 | 2430 | frequencies and probabilities of edges incoming from the nonduplicated region. */ | |
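/* Illustrative sketch: if the region entry has two incoming edges E1 and
   E2 from blocks outside the duplicated region, the loop below accumulates
   EDGE_FREQUENCY (E1) + EDGE_FREQUENCY (E2) and E1->count + E2->count,
   scales both sums, and installs them on ENTRY_BLOCK_PTR.  */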
b35366ce JH |
2431 | if (new_entry) |
2432 | { | |
2433 | edge e; | |
2434 | edge_iterator ei; | |
2435 | ||
2436 | FOR_EACH_EDGE (e, ei, new_entry->preds) | |
2437 | if (!e->src->aux) | |
2438 | { | |
20a6bb58 JH |
2439 | incoming_frequency += EDGE_FREQUENCY (e); |
2440 | incoming_count += e->count; | |
b35366ce | 2441 | } |
8b47039c | 2442 | incoming_count = apply_scale (incoming_count, count_scale); |
20a6bb58 | 2443 | incoming_frequency |
8b47039c | 2444 | = apply_scale ((gcov_type)incoming_frequency, frequency_scale); |
20a6bb58 JH |
2445 | ENTRY_BLOCK_PTR->count = incoming_count; |
2446 | ENTRY_BLOCK_PTR->frequency = incoming_frequency; | |
b35366ce JH |
2447 | } |
2448 | ||
e21aff8a SB |
2449 | /* Must have a CFG here at this point. */ |
2450 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION | |
2451 | (DECL_STRUCT_FUNCTION (callee_fndecl))); | |
2452 | ||
110cfe1c JH |
2453 | cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
2454 | ||
e21aff8a SB |
2455 | ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map; |
2456 | EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map; | |
110cfe1c JH |
2457 | entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); |
2458 | exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); | |
e21aff8a | 2459 | |
e21aff8a SB |
2460 | /* Duplicate any exception-handling regions. */ |
2461 | if (cfun->eh) | |
1d65f45c RH |
2462 | id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr, |
2463 | remap_decl_1, id); | |
726a989a | 2464 | |
e21aff8a SB |
2465 | /* Use aux pointers to map the original blocks to their copies. */ | |
2466 | FOR_EACH_BB_FN (bb, cfun_to_copy) | |
f3b331d1 | 2467 | if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index)) |
91382288 JH |
2468 | { |
2469 | basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale); | |
2470 | bb->aux = new_bb; | |
2471 | new_bb->aux = bb; | |
a9e0d843 | 2472 | new_bb->loop_father = entry_block_map->loop_father; |
91382288 | 2473 | } |
110cfe1c | 2474 | |
7c57be85 | 2475 | last = last_basic_block; |
726a989a | 2476 | |
e21aff8a | 2477 | /* Now that we've duplicated the blocks, duplicate their edges. */ |
92e776e9 RB |
2478 | bool can_make_abnormal_goto | |
2479 | = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call); | |
e21aff8a | 2480 | FOR_ALL_BB_FN (bb, cfun_to_copy) |
f3b331d1 JJ |
2481 | if (!id->blocks_to_copy |
2482 | || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index))) | |
92e776e9 RB |
2483 | need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map, |
2484 | can_make_abnormal_goto); | |
726a989a | 2485 | |
91382288 | 2486 | if (new_entry) |
110cfe1c | 2487 | { |
b35366ce | 2488 | edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU); |
91382288 | 2489 | e->probability = REG_BR_PROB_BASE; |
20a6bb58 | 2490 | e->count = incoming_count; |
110cfe1c | 2491 | } |
726a989a | 2492 | |
a9e0d843 | 2493 | /* Duplicate the loop tree, if available and wanted. */ |
0fc822d0 | 2494 | if (loops_for_fn (src_cfun) != NULL |
a9e0d843 RB |
2495 | && current_loops != NULL) |
2496 | { | |
f3b331d1 | 2497 | copy_loops (id, entry_block_map->loop_father, |
0fc822d0 | 2498 | get_loop (src_cfun, 0)); |
a9e0d843 RB |
2499 | /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */ |
2500 | loops_state_set (LOOPS_NEED_FIXUP); | |
2501 | } | |
2502 | ||
9f8e7a96 RB |
2503 | /* If the loop tree in the source function needed fixup, mark the |
2504 | destination loop tree for fixup, too. */ | |
2505 | if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP) | |
2506 | loops_state_set (LOOPS_NEED_FIXUP); | |
2507 | ||
8b3057b3 JH |
2508 | if (gimple_in_ssa_p (cfun)) |
2509 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
f3b331d1 JJ |
2510 | if (!id->blocks_to_copy |
2511 | || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index))) | |
8b3057b3 JH |
2512 | copy_phis_for_bb (bb, id); |
2513 | ||
91382288 JH |
2514 | FOR_ALL_BB_FN (bb, cfun_to_copy) |
2515 | if (bb->aux) | |
2516 | { | |
2517 | if (need_debug_cleanup | |
2518 | && bb->index != ENTRY_BLOCK | |
2519 | && bb->index != EXIT_BLOCK) | |
2520 | maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux); | |
042ae7d2 JH |
2521 | /* Update call edge destinations. This cannot be done before loop | |
2522 | info is updated, because we may split basic blocks. */ | |
2523 | if (id->transform_call_graph_edges == CB_CGE_DUPLICATE) | |
2524 | redirect_all_calls (id, (basic_block)bb->aux); | |
91382288 JH |
2525 | ((basic_block)bb->aux)->aux = NULL; |
2526 | bb->aux = NULL; | |
2527 | } | |
2528 | ||
110cfe1c JH |
2529 | /* Zero out AUX fields of blocks newly created during EH edge | |
2530 | insertion. */ | |
7c57be85 | 2531 | for (; last < last_basic_block; last++) |
90a7788b JJ |
2532 | { |
2533 | if (need_debug_cleanup) | |
2534 | maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last)); | |
2535 | BASIC_BLOCK (last)->aux = NULL; | |
042ae7d2 JH |
2536 | /* Update call edge destinations. This cannot be done before loop | |
2537 | info is updated, because we may split basic blocks. */ | |
2538 | if (id->transform_call_graph_edges == CB_CGE_DUPLICATE) | |
2539 | redirect_all_calls (id, BASIC_BLOCK (last)); | |
90a7788b | 2540 | } |
110cfe1c JH |
2541 | entry_block_map->aux = NULL; |
2542 | exit_block_map->aux = NULL; | |
e21aff8a | 2543 | |
1d65f45c RH |
2544 | if (id->eh_map) |
2545 | { | |
2546 | pointer_map_destroy (id->eh_map); | |
2547 | id->eh_map = NULL; | |
2548 | } | |
2549 | ||
e21aff8a SB |
2550 | return new_fndecl; |
2551 | } | |
2552 | ||
b5b8b0ac AO |
2553 | /* Copy the debug STMT using ID. We deal with these statements in a |
2554 | special way: if any variable in their VALUE expression wasn't | |
2555 | remapped yet, we won't remap it, because that would get decl uids | |
2556 | out of sync, causing codegen differences between -g and -g0. If | |
2557 | this arises, we drop the VALUE expression altogether. */ | |
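/* Illustrative sketch (hypothetical decls): for  # DEBUG x => y_1 + z
   where Z was optimized away and never remapped, remapping would have to
   create a decl uid that exists only under -g; instead the bind is
   degraded to  # DEBUG x => NULL  by the punting below.  */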
2558 | ||
2559 | static void | |
2560 | copy_debug_stmt (gimple stmt, copy_body_data *id) | |
2561 | { | |
2562 | tree t, *n; | |
2563 | struct walk_stmt_info wi; | |
2564 | ||
b5b8b0ac AO |
2565 | if (gimple_block (stmt)) |
2566 | { | |
b5b8b0ac | 2567 | n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt)); |
16917761 | 2568 | gimple_set_block (stmt, n ? *n : id->block); |
b5b8b0ac | 2569 | } |
b5b8b0ac AO |
2570 | |
2571 | /* Remap all the operands in COPY. */ | |
2572 | memset (&wi, 0, sizeof (wi)); | |
2573 | wi.info = id; | |
2574 | ||
2575 | processing_debug_stmt = 1; | |
2576 | ||
ddb555ed JJ |
2577 | if (gimple_debug_source_bind_p (stmt)) |
2578 | t = gimple_debug_source_bind_get_var (stmt); | |
2579 | else | |
2580 | t = gimple_debug_bind_get_var (stmt); | |
b5b8b0ac AO |
2581 | |
2582 | if (TREE_CODE (t) == PARM_DECL && id->debug_map | |
2583 | && (n = (tree *) pointer_map_contains (id->debug_map, t))) | |
2584 | { | |
2585 | gcc_assert (TREE_CODE (*n) == VAR_DECL); | |
2586 | t = *n; | |
2587 | } | |
d17af147 | 2588 | else if (TREE_CODE (t) == VAR_DECL |
5f564b8f MM |
2589 | && !is_global_var (t) |
2590 | && !pointer_map_contains (id->decl_map, t)) | |
d17af147 | 2591 | /* T is a non-localized variable. */; |
b5b8b0ac AO |
2592 | else |
2593 | walk_tree (&t, remap_gimple_op_r, &wi, NULL); | |
2594 | ||
ddb555ed JJ |
2595 | if (gimple_debug_bind_p (stmt)) |
2596 | { | |
2597 | gimple_debug_bind_set_var (stmt, t); | |
b5b8b0ac | 2598 | |
ddb555ed JJ |
2599 | if (gimple_debug_bind_has_value_p (stmt)) |
2600 | walk_tree (gimple_debug_bind_get_value_ptr (stmt), | |
2601 | remap_gimple_op_r, &wi, NULL); | |
b5b8b0ac | 2602 | |
ddb555ed JJ |
2603 | /* Punt if any decl couldn't be remapped. */ |
2604 | if (processing_debug_stmt < 0) | |
2605 | gimple_debug_bind_reset_value (stmt); | |
2606 | } | |
2607 | else if (gimple_debug_source_bind_p (stmt)) | |
2608 | { | |
2609 | gimple_debug_source_bind_set_var (stmt, t); | |
2610 | walk_tree (gimple_debug_source_bind_get_value_ptr (stmt), | |
2611 | remap_gimple_op_r, &wi, NULL); | |
878eef4a JJ |
2612 | /* When inlining, if the source bind refers to one of the optimized- | |
2613 | away parameters, change the source bind into a normal debug bind | |
2614 | referring to the corresponding DEBUG_EXPR_DECL that should have | |
2615 | been bound before the call stmt. */ | |
2616 | t = gimple_debug_source_bind_get_value (stmt); | |
2617 | if (t != NULL_TREE | |
2618 | && TREE_CODE (t) == PARM_DECL | |
2619 | && id->gimple_call) | |
2620 | { | |
9771b263 | 2621 | vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn); |
878eef4a JJ |
2622 | unsigned int i; |
2623 | if (debug_args != NULL) | |
2624 | { | |
9771b263 DN |
2625 | for (i = 0; i < vec_safe_length (*debug_args); i += 2) |
2626 | if ((**debug_args)[i] == DECL_ORIGIN (t) | |
2627 | && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL) | |
878eef4a | 2628 | { |
9771b263 | 2629 | t = (**debug_args)[i + 1]; |
878eef4a JJ |
2630 | stmt->gsbase.subcode = GIMPLE_DEBUG_BIND; |
2631 | gimple_debug_bind_set_value (stmt, t); | |
2632 | break; | |
2633 | } | |
2634 | } | |
2635 | } | |
ddb555ed | 2636 | } |
b5b8b0ac AO |
2637 | |
2638 | processing_debug_stmt = 0; | |
2639 | ||
2640 | update_stmt (stmt); | |
b5b8b0ac AO |
2641 | } |
2642 | ||
2643 | /* Process deferred debug stmts. In order to give values better odds | |
2644 | of being successfully remapped, we delay the processing of debug | |
2645 | stmts until all other stmts that might require remapping are | |
2646 | processed. */ | |
2647 | ||
2648 | static void | |
2649 | copy_debug_stmts (copy_body_data *id) | |
2650 | { | |
2651 | size_t i; | |
2652 | gimple stmt; | |
2653 | ||
9771b263 | 2654 | if (!id->debug_stmts.exists ()) |
b5b8b0ac AO |
2655 | return; |
2656 | ||
9771b263 | 2657 | FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt) |
b5b8b0ac AO |
2658 | copy_debug_stmt (stmt, id); |
2659 | ||
9771b263 | 2660 | id->debug_stmts.release (); |
b5b8b0ac AO |
2661 | } |
2662 | ||
/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
	   basic_block entry_block_map, basic_block exit_block_map,
	   basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map,
			exit_block_map, new_entry);
  copy_debug_stmts (id);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}

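/* This guard matters for recursive inlining: if a call passes `&local`
   where `local` lives in the very function being inlined, the
   ADDR_EXPR must not be propagated as a constant, or the inlined copy
   would alias the caller's instance of the variable.  (A sketch; see
   the use in setup_one_parameter below.)  */
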
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}

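/* (In GIMPLE dumps the note built above shows up as
   "# DEBUG var => value"; it expands to no code and only guides later
   variable tracking.)

   Insert INIT_STMT, if non-NULL, at the end of basic block BB,
   regimplifying its operands and emitting a debug bind for the
   assigned variable when debug info is wanted.  */
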
static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
	 from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}

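/* Sketch of the forcing case above (names invented): an init like
   `mem_var = (int) x_1` where mem_var is not a gimple register becomes
   `tmp_2 = (int) x_1; mem_var = tmp_2;`, so the store gets a register
   rhs that gimple_regimplify_operands can handle.  */
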
/* Initialize parameter P with VALUE.  If needed, produce init statement
   at the end of BB.  When BB is NULL, we return init statement to be
   output later.  */
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
					TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other constructs
     requiring a constant argument to be visible in the inlined function
     body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form, properly remap the default definition
	 or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var, NULL);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}

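/* Net effect, sketched: inlining `int inc (int x)` at the call
   `inc (3)` maps the PARM_DECL x to a fresh local VAR_DECL; in SSA,
   the default definition x_1(D) is either mapped straight to 3 (the
   easy route above) or initialized by an emitted `x_2 = 3;` at the end
   of the entry block.  SSA name numbers are illustrative.  */
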
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
      if (varp
	  && TREE_CODE (*varp) == VAR_DECL)
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

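/* The second loop exists for cases like the GNU C parameter forward
   declaration `void f (int len; char data[len], int len)` (an assumed
   example), where the type of `data` mentions a parameter that is only
   remapped after it.  */
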
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.
   It is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if
   non-null, was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
	}
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	    DECL_GIMPLE_REG_P (var) = false;
	  else if (is_gimple_reg_type (TREE_TYPE (var)))
	    TREE_ADDRESSABLE (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 its default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
	  && is_gimple_reg (result))
	{
	  temp = make_ssa_name (temp, NULL);
	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
	}
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  return use;
}

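/* Common case, sketched: for `y = f ();` MODIFY_DEST is `y`; when `y`
   is a local, non-addressable decl of matching type, the callee's
   RESULT_DECL remaps directly to `y`, so no temporary and no extra
   copy are created.  With the return slot optimization the callee
   instead constructs straight into the caller's slot.  */
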
/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

static const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree decl;
  unsigned ix;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
		  "because it receives a non-local goto");
      goto fail;
    }

  FOR_EACH_LOCAL_DECL (fun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
	&& TREE_STATIC (decl)
	&& !DECL_EXTERNAL (decl)
	&& DECL_INITIAL (decl)
	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
					 has_label_address_in_static_1,
					 fndecl))
      {
	reason = G_("function %q+F can never be copied because it saves "
		    "address of local label in a static variable");
	goto fail;
      }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}


static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			 struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline alloca call unless user explicitly forced so as
	 this may change program's memory overhead drastically when the
	 function using alloca is called in a loop.  In the GCC present in
	 SPEC2000, inlining into schedule_block caused it to require 2GB of
	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
	 VLA objects as those can't cause unbounded growth (they're always
	 wrapped inside stack_save/stack_restore regions).  */
      if (gimple_alloca_call_p (stmt)
	  && !gimple_call_alloca_for_var_p (stmt)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  *handled_ops_p = true;
	  return fn;
	}

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  *handled_ops_p = true;
	  return t;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    *handled_ops_p = true;
	    return t;

	  default:
	    break;
	  }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  *handled_ops_p = true;
	  return t;
	}
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}

/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or that are non-optimal for inlining.  */
  visited_nodes = pointer_set_create ();
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
	break;
    }

  pointer_set_destroy (visited_nodes);
  return forbidden_p;
}
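
/* Illustrative rejects (sketches in GNU C, not from the testsuite):

     void f (void) { static void *p = &&lab; lab: return; }

   can never be copied (address of a local label in a static), while a
   function calling __builtin_alloca with a non-VLA argument, or one
   doing `goto *expr;`, can never be inlined; the alloca case alone can
   be overridden with the always_inline attribute.  */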
\f
/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
static bool
function_attribute_inlinable_p (const_tree fndecl)
{
  if (targetm.attribute_table)
    {
      const_tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  const_tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
		&& DECL_DECLARED_INLINE_P (fn)
		&& !DECL_NO_INLINE_WARNING_P (fn)
		&& !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.  */
      if (always_inline)
	error (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
      enum machine_mode simd
	= targetm.vectorize.preferred_simd_mode (inner);
      int simd_mode_size = GET_MODE_SIZE (simd);
      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
	      / simd_mode_size);
    }

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}

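/* Worked example with assumed target values MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4: a 24-byte struct costs (24 + 7) / 8 == 3 units,
   while a 64-byte struct exceeds 8 * 4 bytes and is charged the flat 4
   of the presumed memcpy call.  */
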
/* Returns cost of operation CODE, according to WEIGHTS  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
			tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case FMA_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:

      return 1;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
	return weights->div_mod_cost;
      return 1;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
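
/* Example: `x / y` with a non-constant `y` is charged div_mod_cost
   (1 in size units, 10 in time units per init_inline_once below),
   while `x / 4` counts as an ordinary operation of cost 1.  */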

/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

static
int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}


/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;

	 Let us look at these two cases, assuming we have "a = b + C":
	 <GIMPLE_ASSIGN <var_decl "a">
		<plus_expr <var_decl "b"> <constant C>>
	 If "a" is a GIMPLE register, the assignment to it is free on almost
	 any target, because "a" usually ends up in a real register.  Hence
	 the only cost of this expression comes from the PLUS_EXPR, and we
	 can ignore the GIMPLE_ASSIGN.
	 If "a" is not a GIMPLE register, the assignment to "a" will most
	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
	 of moving something into "a", which we compute using the function
	 estimate_move_cost.  */
      if (gimple_clobber_p (stmt))
	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (lhs));
      if (gimple_assign_load_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (rhs));

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
				      gimple_assign_rhs1 (stmt),
				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
				      == GIMPLE_BINARY_RHS
				      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
					 gimple_op (stmt, 0),
					 gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      /* Take into account cost of the switch + guess 2 conditional jumps for
	 each case label.

	 TODO: once the switch expansion logic is sufficiently separated, we
	 can do a better job of estimating the cost of the switch.  */
      if (weights->time_based)
	cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
      else
	cost = gimple_switch_num_labels (stmt) * 2;
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	struct cgraph_node *node = NULL;

	/* Do not special case builtins where we see the body.
	   This just confuses the inliner.  */
	if (!decl || !(node = cgraph_get_node (decl)) || node->definition)
	  ;
	/* For builtins that are likely expanded to nothing or
	   inlined do not account operand costs.  */
	else if (is_simple_builtin (decl))
	  return 0;
	else if (is_inexpensive_builtin (decl))
	  return weights->target_builtin_call_cost;
	else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
	       specialize the cheap expansion we do here.
	       ???  This asks for a more general solution.  */
	    switch (DECL_FUNCTION_CODE (decl))
	      {
	      case BUILT_IN_POW:
	      case BUILT_IN_POWF:
	      case BUILT_IN_POWL:
		if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
		    && REAL_VALUES_EQUAL
			 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
		  return estimate_operator_cost (MULT_EXPR, weights,
						 gimple_call_arg (stmt, 0),
						 gimple_call_arg (stmt, 0));
		break;

	      default:
		break;
	      }
	  }

	cost = node ? weights->call_cost : weights->indirect_call_cost;
	if (gimple_call_lhs (stmt))
	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    cost += estimate_move_cost (TREE_TYPE (arg));
	  }
	break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      return 0;

    case GIMPLE_ASM:
      {
	int count = asm_str_count (gimple_asm_string (stmt));
	/* 1000 means infinity.  This avoids overflows later
	   with very long asm statements.  */
	if (count > 1000)
	  count = 1000;
	return count;
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
	 argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ???  This is going to turn into a switch statement.  Ideally
	 we'd have a look at the eh region and estimate the number of
	 edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
	      + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OpenMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
	      + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
	      + estimate_num_insns_seq (gimple_transaction_body (stmt),
					weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}

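/* Worked example under the size weights below (call_cost == 1):
   `y = f (a, b)` is charged 1 for the call plus the move costs of y,
   a and b; a register-only `c_1 = a_2 + b_3` costs just the 1 of its
   PLUS_EXPR, since the assignment itself is free.  */
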
/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}


/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.indirect_call_cost = 3;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.tm_cost = 10;
  eni_size_weights.time_based = false;
  eni_size_weights.return_cost = 1;

  /* Estimating time for call is difficult, since we have no idea what the
     called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.indirect_call_cost = 15;
  eni_time_weights.target_builtin_call_cost = 1;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.tm_cost = 40;
  eni_time_weights.time_based = true;
  eni_time_weights.return_cost = 2;
}

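/* Note the asymmetry these weights encode: a direct call and a
   non-constant division cost 1 each in size units but 10 each in time
   units, while OpenMP directives are charged 40 in both, reflecting
   their expansion into runtime calls.  */
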
/* Estimate the number of instructions in a gimple_seq.  */

int
count_insns_seq (gimple_seq seq, eni_weights *weights)
{
  gimple_stmt_iterator gsi;
  int n = 0;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n += estimate_num_insns (gsi_stmt (gsi), weights);

  return n;
}


/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}

/* Add local variables from CALLEE to CALLER.  */

static inline void
add_local_variables (struct function *callee, struct function *caller,
		     copy_body_data *id)
{
  tree var;
  unsigned ix;

  FOR_EACH_LOCAL_DECL (callee, ix, var)
    if (!can_be_nonlocal (var, id))
      {
	tree new_var = remap_decl (var, id);

	/* Remap debug-expressions.  */
	if (TREE_CODE (new_var) == VAR_DECL
	    && DECL_HAS_DEBUG_EXPR_P (var)
	    && new_var != var)
	  {
	    tree tem = DECL_DEBUG_EXPR (var);
	    bool old_regimplify = id->regimplify;
	    id->remapping_type_depth++;
	    walk_tree (&tem, copy_tree_body_r, id, NULL);
	    id->remapping_type_depth--;
	    id->regimplify = old_regimplify;
	    SET_DECL_DEBUG_EXPR (new_var, tem);
	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
	  }
	add_local_decl (caller, new_var);
      }
}

726a989a | 4026 | /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */ |
d4e4baa9 | 4027 | |
e21aff8a | 4028 | static bool |
726a989a | 4029 | expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id) |
d4e4baa9 | 4030 | { |
0f900dfa | 4031 | tree use_retvar; |
d436bff8 | 4032 | tree fn; |
b5b8b0ac | 4033 | struct pointer_map_t *st, *dst; |
110cfe1c | 4034 | tree return_slot; |
7740f00d | 4035 | tree modify_dest; |
6de9cd9a | 4036 | location_t saved_location; |
e21aff8a | 4037 | struct cgraph_edge *cg_edge; |
61a05df1 | 4038 | cgraph_inline_failed_t reason; |
e21aff8a SB |
4039 | basic_block return_block; |
4040 | edge e; | |
726a989a | 4041 | gimple_stmt_iterator gsi, stmt_gsi; |
e21aff8a | 4042 | bool successfully_inlined = FALSE; |
4f6c2131 | 4043 | bool purge_dead_abnormal_edges; |
d4e4baa9 | 4044 | |
6de9cd9a DN |
4045 | /* Set input_location here so we get the right instantiation context |
4046 | if we call instantiate_decl from inlinable_function_p. */ | |
532aafad | 4047 | /* FIXME: instantiate_decl isn't called by inlinable_function_p. */ |
6de9cd9a | 4048 | saved_location = input_location; |
035775c8 | 4049 | input_location = gimple_location (stmt); |
6de9cd9a | 4050 | |
d4e4baa9 | 4051 | /* From here on, we're only interested in GIMPLE_CALLs. */ |
726a989a | 4052 | if (gimple_code (stmt) != GIMPLE_CALL) |
6de9cd9a | 4053 | goto egress; |
d4e4baa9 | 4054 | |
db09f943 MJ |
4055 | cg_edge = cgraph_edge (id->dst_node, stmt); |
4056 | gcc_checking_assert (cg_edge); | |
d4e4baa9 AO |
4057 | /* First, see if we can figure out what function is being called. |
4058 | If we cannot, then there is no hope of inlining the function. */ | |
db09f943 | 4059 | if (cg_edge->indirect_unknown_callee) |
3949c4a7 | 4060 | goto egress; |
67348ccc | 4061 | fn = cg_edge->callee->decl; |
db09f943 | 4062 | gcc_checking_assert (fn); |
b58b1157 | 4063 | |
726a989a | 4064 | /* If FN is a declaration of a function in a nested scope that was |
a1a0fd4e AO |
4065 | globally declared inline, we don't set its DECL_INITIAL. |
4066 | However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the | |
4067 | C++ front-end uses it for cdtors to refer to their internal | |
4068 | declarations, that are not real functions. Fortunately those | |
4069 | don't have trees to be saved, so we can tell by checking their | |
726a989a RB |
4070 | gimple_body. */ |
4071 | if (!DECL_INITIAL (fn) | |
a1a0fd4e | 4072 | && DECL_ABSTRACT_ORIGIN (fn) |
39ecc018 | 4073 | && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn))) |
a1a0fd4e AO |
4074 | fn = DECL_ABSTRACT_ORIGIN (fn); |
4075 | ||
8f4f502f | 4076 | /* Don't try to inline functions that are not well-suited to inlining. */ |
9c8305f8 | 4077 | if (cg_edge->inline_failed) |
a833faa5 | 4078 | { |
9c8305f8 | 4079 | reason = cg_edge->inline_failed; |
3e293154 MJ |
4080 | /* If this call was originally indirect, we do not want to emit any |
4081 | inlining related warnings or sorry messages because there are no | |
4082 | guarantees regarding those. */ | |
e33c6cd6 | 4083 | if (cg_edge->indirect_inlining_edge) |
3e293154 MJ |
4084 | goto egress; |
4085 | ||
7fac66d4 | 4086 | if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)) |
bfc61b40 JH |
4087 | /* For extern inline functions that get redefined we have always |
4088 | silently ignored the always_inline flag. Better behavior would |
4089 | be to keep both bodies and use the extern inline body for |
4090 | inlining, but we can't do that because front ends overwrite |
4091 | the body. */ |
4092 | && !cg_edge->callee->local.redefined_extern_inline | |
df9dda2d ST |
4093 | /* During the early inline pass, report only when optimization is |
4094 | not turned on. */ | |
4095 | && (cgraph_global_info_ready | |
4096 | || !optimize) | |
c9fc06dc CB |
4097 | /* PR 20090218-1_0.c. Body can be provided by another module. */ |
4098 | && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto)) | |
2d327012 | 4099 | { |
c9fc06dc CB |
4100 | error ("inlining failed in call to always_inline %q+F: %s", fn, |
4101 | cgraph_inline_failed_string (reason)); | |
4102 | error ("called from here"); | |
2d327012 | 4103 | } |
ff7037dc EB |
4104 | else if (warn_inline |
4105 | && DECL_DECLARED_INLINE_P (fn) | |
4106 | && !DECL_NO_INLINE_WARNING_P (fn) | |
2d327012 | 4107 | && !DECL_IN_SYSTEM_HEADER (fn) |
61a05df1 | 4108 | && reason != CIF_UNSPECIFIED |
d63db217 | 4109 | && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)) |
d7d1d041 RG |
4110 | /* Do not warn about not inlined recursive calls. */ |
4111 | && !cgraph_edge_recursive_p (cg_edge) | |
d63db217 | 4112 | /* Avoid warnings during early inline pass. */ |
7e8b322a | 4113 | && cgraph_global_info_ready) |
a833faa5 | 4114 | { |
dee15844 | 4115 | warning (OPT_Winline, "inlining failed in call to %q+F: %s", |
49c8958b | 4116 | fn, _(cgraph_inline_failed_string (reason))); |
3176a0c2 | 4117 | warning (OPT_Winline, "called from here"); |
a833faa5 | 4118 | } |
6de9cd9a | 4119 | goto egress; |
a833faa5 | 4120 | } |
67348ccc | 4121 | fn = cg_edge->callee->decl; |
a2e2a668 | 4122 | cgraph_get_body (cg_edge->callee); |
d4e4baa9 | 4123 | |
18c6ada9 | 4124 | #ifdef ENABLE_CHECKING |
67348ccc | 4125 | if (cg_edge->callee->decl != id->dst_node->decl) |
e21aff8a | 4126 | verify_cgraph_node (cg_edge->callee); |
18c6ada9 JH |
4127 | #endif |
4128 | ||
e21aff8a | 4129 | /* We will be inlining this callee. */ |
1d65f45c | 4130 | id->eh_lp_nr = lookup_stmt_eh_lp (stmt); |
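| /* Recording the landing pad guarding the call lets throwing |
| statements in the copied body be rescoped into the caller's |
| EH region. */ |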
e21aff8a | 4131 | |
f9417da1 | 4132 | /* Update the caller's EH personality. */ |
67348ccc DM |
4133 | if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)) |
4134 | DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl) | |
4135 | = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl); | |
f9417da1 | 4136 | |
726a989a | 4137 | /* Split the block holding the GIMPLE_CALL. */ |
e21aff8a SB |
4138 | e = split_block (bb, stmt); |
4139 | bb = e->src; | |
4140 | return_block = e->dest; | |
4141 | remove_edge (e); | |
4142 | ||
4f6c2131 EB |
4143 | /* split_block splits after the statement; work around this by |
4144 | moving the call into the second block manually. Not pretty, | |
4145 | but seems easier than doing the CFG manipulation by hand | |
726a989a RB |
4146 | when the GIMPLE_CALL is the last statement of BB. */ |
4147 | stmt_gsi = gsi_last_bb (bb); | |
4148 | gsi_remove (&stmt_gsi, false); | |
4f6c2131 | 4149 | |
726a989a | 4150 | /* If the GIMPLE_CALL was in the last statement of BB, it may have |
4f6c2131 EB |
4151 | been the source of abnormal edges. In this case, schedule |
4152 | the removal of dead abnormal edges. */ | |
726a989a RB |
4153 | gsi = gsi_start_bb (return_block); |
4154 | if (gsi_end_p (gsi)) | |
e21aff8a | 4155 | { |
726a989a | 4156 | gsi_insert_after (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 | 4157 | purge_dead_abnormal_edges = true; |
e21aff8a | 4158 | } |
4f6c2131 EB |
4159 | else |
4160 | { | |
726a989a | 4161 | gsi_insert_before (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 EB |
4162 | purge_dead_abnormal_edges = false; |
4163 | } | |
4164 | ||
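| /* STMT_GSI now points at the GIMPLE_CALL, the first statement of |
| RETURN_BLOCK; it is replaced or removed once the body has been |
| copied. */ |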
726a989a | 4165 | stmt_gsi = gsi_start_bb (return_block); |
742a37d5 | 4166 | |
d436bff8 AH |
4167 | /* Build a block containing code to initialize the arguments, the |
4168 | actual inline expansion of the body, and a label for the return | |
4169 | statements within the function to jump to. The type of the | |
3e492e9c RB |
4170 | statement expression is the return type of the function call. |
4171 | ??? If the call does not have an associated block then we will | |
4172 | remap all callee blocks to NULL, effectively dropping most of | |
4173 | its debug information. This should only happen for calls to | |
4174 | artificial decls inserted by the compiler itself. We need to | |
4175 | either link the inlined blocks into the caller block tree or | |
4176 | not refer to them in any way to not break GC for locations. */ | |
5368224f | 4177 | if (gimple_block (stmt)) |
3e492e9c RB |
4178 | { |
4179 | id->block = make_node (BLOCK); | |
4180 | BLOCK_ABSTRACT_ORIGIN (id->block) = fn; | |
a9d5a059 | 4181 | BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location); |
3e492e9c RB |
4182 | prepend_lexical_block (gimple_block (stmt), id->block); |
4183 | } | |
e21aff8a | 4184 | |
d4e4baa9 AO |
4185 | /* Local declarations will be replaced by their equivalents in this |
4186 | map. */ | |
4187 | st = id->decl_map; | |
6be42dd4 | 4188 | id->decl_map = pointer_map_create (); |
b5b8b0ac AO |
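| /* Likewise stash the caller's debug-expression map; a fresh one |
| for the copy is created on demand. */ |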
4189 | dst = id->debug_map; |
4190 | id->debug_map = NULL; | |
d4e4baa9 | 4191 | |
e21aff8a | 4192 | /* Record the function we are about to inline. */ |
1b369fae RH |
4193 | id->src_fn = fn; |
4194 | id->src_node = cg_edge->callee; | |
110cfe1c | 4195 | id->src_cfun = DECL_STRUCT_FUNCTION (fn); |
726a989a | 4196 | id->gimple_call = stmt; |
1b369fae | 4197 | |
3c8da8a5 AO |
4198 | gcc_assert (!id->src_cfun->after_inlining); |
4199 | ||
045685a9 | 4200 | id->entry_bb = bb; |
7299cb99 JH |
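| /* If the callee is marked cold, hint to branch prediction that |
| paths through the inlined body are unlikely to be taken. */ |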
4201 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn))) |
4202 | { | |
4203 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
4204 | gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION, | |
4205 | NOT_TAKEN), | |
4206 | GSI_NEW_STMT); | |
4207 | } | |
726a989a | 4208 | initialize_inlined_parameters (id, stmt, fn, bb); |
d4e4baa9 | 4209 | |
ea99e0be | 4210 | if (DECL_INITIAL (fn)) |
94645a02 | 4211 | { |
3e492e9c RB |
4212 | if (gimple_block (stmt)) |
4213 | { | |
4214 | tree *var; | |
4215 | ||
4216 | prepend_lexical_block (id->block, | |
4217 | remap_blocks (DECL_INITIAL (fn), id)); | |
4218 | gcc_checking_assert (BLOCK_SUBBLOCKS (id->block) | |
4219 | && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block)) | |
4220 | == NULL_TREE)); | |
4221 | /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block, | |
4222 | otherwise, in DWARF, the DW_TAG_formal_parameter entries would not be |
4223 | children of DW_TAG_inlined_subroutine but of a DW_TAG_lexical_block |
4224 | under it. The parameters could then still be evaluated in the |
4225 | debugger, but would not show up in backtraces. */ |
4226 | for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; ) | |
4227 | if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL) | |
4228 | { | |
4229 | tree v = *var; | |
4230 | *var = TREE_CHAIN (v); | |
4231 | TREE_CHAIN (v) = BLOCK_VARS (id->block); | |
4232 | BLOCK_VARS (id->block) = v; | |
4233 | } | |
4234 | else | |
4235 | var = &TREE_CHAIN (*var); | |
4236 | } | |
4237 | else | |
4238 | remap_blocks_to_null (DECL_INITIAL (fn), id); | |
94645a02 | 4239 | } |
acb8f212 | 4240 | |
d4e4baa9 AO |
4241 | /* Return statements in the function body will be replaced by jumps |
4242 | to the RET_LABEL. */ | |
1e128c5f GB |
4243 | gcc_assert (DECL_INITIAL (fn)); |
4244 | gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK); | |
23700f65 | 4245 | |
726a989a | 4246 | /* Find the LHS to which the result of this call is assigned. */ |
110cfe1c | 4247 | return_slot = NULL; |
726a989a | 4248 | if (gimple_call_lhs (stmt)) |
81bafd36 | 4249 | { |
726a989a | 4250 | modify_dest = gimple_call_lhs (stmt); |
81bafd36 ILT |
4251 | |
4252 | /* The function which we are inlining might not return a value, | |
4253 | in which case we should issue a warning that the function | |
4254 | does not return a value. In that case the optimizers will | |
4255 | see that the variable to which the value is assigned was not | |
4256 | initialized. We do not want to issue a warning about that | |
4257 | uninitialized variable. */ | |
4258 | if (DECL_P (modify_dest)) | |
4259 | TREE_NO_WARNING (modify_dest) = 1; | |
726a989a RB |
4260 | |
4261 | if (gimple_call_return_slot_opt_p (stmt)) | |
fa47911c | 4262 | { |
110cfe1c | 4263 | return_slot = modify_dest; |
fa47911c JM |
4264 | modify_dest = NULL; |
4265 | } | |
81bafd36 | 4266 | } |
7740f00d RH |
4267 | else |
4268 | modify_dest = NULL; | |
4269 | ||
1ea193c2 ILT |
4270 | /* If we are inlining a call to the C++ operator new, we don't want |
4271 | to use type based alias analysis on the return value. Otherwise | |
4272 | we may get confused if the compiler sees that the inlined new | |
4273 | function returns a pointer which was just deleted. See bug | |
4274 | 33407. */ | |
4275 | if (DECL_IS_OPERATOR_NEW (fn)) | |
4276 | { | |
4277 | return_slot = NULL; | |
4278 | modify_dest = NULL; | |
4279 | } | |
4280 | ||
d4e4baa9 | 4281 | /* Declare the return variable for the function. */ |
6938f93f | 4282 | use_retvar = declare_return_variable (id, return_slot, modify_dest, bb); |
1ea193c2 | 4283 | |
acb8f212 | 4284 | /* Add local vars in this inlined callee to caller. */ |
ae0379fc | 4285 | add_local_variables (id->src_cfun, cfun, id); |
acb8f212 | 4286 | |
0d63a740 JH |
4287 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4288 | { | |
4289 | fprintf (dump_file, "Inlining "); | |
b8698a0f | 4290 | print_generic_expr (dump_file, id->src_fn, 0); |
0d63a740 | 4291 | fprintf (dump_file, " to "); |
b8698a0f | 4292 | print_generic_expr (dump_file, id->dst_fn, 0); |
0d63a740 JH |
4293 | fprintf (dump_file, " with frequency %i\n", cg_edge->frequency); |
4294 | } | |
4295 | ||
eb50f5f4 JH |
4296 | /* This is it. Duplicate the callee body. Assume callee is |
4297 | pre-gimplified. Note that we must not alter the caller | |
4298 | function in any way before this point, as this CALL_EXPR may be | |
4299 | a self-referential call; if we're calling ourselves, we need to | |
4300 | duplicate our body before altering anything. */ | |
0d63a740 | 4301 | copy_body (id, bb->count, |
8b47039c | 4302 | GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE), |
f3b331d1 | 4303 | bb, return_block, NULL); |
eb50f5f4 | 4304 | |
d086d311 | 4305 | /* Reset the escaped solution. */ |
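| /* The inlined body may introduce new escape sites, so any cached |
| points-to information would be stale. */ |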
6b8ed145 | 4306 | if (cfun->gimple_df) |
d086d311 | 4307 | pt_solution_reset (&cfun->gimple_df->escaped); |
6b8ed145 | 4308 | |
d4e4baa9 | 4309 | /* Clean up. */ |
b5b8b0ac AO |
4310 | if (id->debug_map) |
4311 | { | |
4312 | pointer_map_destroy (id->debug_map); | |
4313 | id->debug_map = dst; | |
4314 | } | |
6be42dd4 | 4315 | pointer_map_destroy (id->decl_map); |
d4e4baa9 AO |
4316 | id->decl_map = st; |
4317 | ||
5006671f RG |
4318 | /* Unlink the call's virtual operands before replacing the call. */ |
4319 | unlink_stmt_vdef (stmt); | |
4320 | ||
84936f6f | 4321 | /* If the inlined function returns a result that we care about, |
726a989a RB |
4322 | substitute the GIMPLE_CALL with an assignment of the return |
4323 | variable to the LHS of the call. That is, if STMT was | |
4324 | 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */ | |
4325 | if (use_retvar && gimple_call_lhs (stmt)) | |
e21aff8a | 4326 | { |
726a989a RB |
4327 | gimple old_stmt = stmt; |
4328 | stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar); | |
4329 | gsi_replace (&stmt_gsi, stmt, false); | |
726a989a | 4330 | maybe_clean_or_replace_eh_stmt (old_stmt, stmt); |
e21aff8a | 4331 | } |
6de9cd9a | 4332 | else |
110cfe1c | 4333 | { |
726a989a RB |
4334 | /* Handle the case of inlining a function with no return |
4335 | statement, which causes the return value to become undefined. */ | |
4336 | if (gimple_call_lhs (stmt) | |
4337 | && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME) | |
110cfe1c | 4338 | { |
726a989a RB |
4339 | tree name = gimple_call_lhs (stmt); |
4340 | tree var = SSA_NAME_VAR (name); | |
32244553 | 4341 | tree def = ssa_default_def (cfun, var); |
110cfe1c | 4342 | |
110cfe1c JH |
4343 | if (def) |
4344 | { | |
726a989a RB |
4345 | /* If the variable is used undefined, make this name |
4346 | undefined via a move. */ | |
4347 | stmt = gimple_build_assign (gimple_call_lhs (stmt), def); | |
4348 | gsi_replace (&stmt_gsi, stmt, true); | |
110cfe1c | 4349 | } |
110cfe1c JH |
4350 | else |
4351 | { | |
726a989a RB |
4352 | /* Otherwise make this variable undefined. */ |
4353 | gsi_remove (&stmt_gsi, true); | |
32244553 | 4354 | set_ssa_default_def (cfun, var, name); |
726a989a | 4355 | SSA_NAME_DEF_STMT (name) = gimple_build_nop (); |
110cfe1c JH |
4356 | } |
4357 | } | |
4358 | else | |
726a989a | 4359 | gsi_remove (&stmt_gsi, true); |
110cfe1c | 4360 | } |
d4e4baa9 | 4361 | |
4f6c2131 | 4362 | if (purge_dead_abnormal_edges) |
30fd5881 EB |
4363 | { |
4364 | gimple_purge_dead_eh_edges (return_block); | |
4365 | gimple_purge_dead_abnormal_call_edges (return_block); | |
4366 | } | |
84936f6f | 4367 | |
e21aff8a SB |
4368 | /* If the value of the new expression is ignored, that's OK. We |
4369 | don't warn about this for CALL_EXPRs, so we shouldn't warn about | |
4370 | the equivalent inlined version either. */ | |
726a989a RB |
4371 | if (is_gimple_assign (stmt)) |
4372 | { | |
4373 | gcc_assert (gimple_assign_single_p (stmt) | |
1a87cf0c | 4374 | || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))); |
726a989a RB |
4375 | TREE_USED (gimple_assign_rhs1 (stmt)) = 1; |
4376 | } | |
84936f6f | 4377 | |
1eb3331e DB |
4378 | /* Output the inlining info for this abstract function, since it has been |
4379 | inlined. If we don't do this now, we can lose the information about the | |
4380 | variables in the function when the blocks get blown away as soon as we | |
4381 | remove the cgraph node. */ | |
3e492e9c | 4382 | if (gimple_block (stmt)) |
67348ccc | 4383 | (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl); |
84936f6f | 4384 | |
e72fcfe8 | 4385 | /* Update callgraph if needed. */ |
e21aff8a | 4386 | cgraph_remove_node (cg_edge->callee); |
e72fcfe8 | 4387 | |
e21aff8a | 4388 | id->block = NULL_TREE; |
e21aff8a | 4389 | successfully_inlined = TRUE; |
742a37d5 | 4390 | |
6de9cd9a DN |
4391 | egress: |
4392 | input_location = saved_location; | |
e21aff8a | 4393 | return successfully_inlined; |
d4e4baa9 | 4394 | } |
6de9cd9a | 4395 | |
e21aff8a SB |
4396 | /* Expand call statements in basic block BB; return true as soon |
4397 | as one call is successfully inlined. We can only have CALL_EXPRs |
0a35513e | 4398 | as the "toplevel" tree code or nested in a MODIFY_EXPR. */ |
e21aff8a SB |
4399 | |
4400 | static bool | |
1b369fae | 4401 | gimple_expand_calls_inline (basic_block bb, copy_body_data *id) |
6de9cd9a | 4402 | { |
726a989a | 4403 | gimple_stmt_iterator gsi; |
6de9cd9a | 4404 | |
726a989a | 4405 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
6de9cd9a | 4406 | { |
726a989a | 4407 | gimple stmt = gsi_stmt (gsi); |
e21aff8a | 4408 | |
726a989a RB |
4409 | if (is_gimple_call (stmt) |
4410 | && expand_call_inline (bb, stmt, id)) | |
4411 | return true; | |
6de9cd9a | 4412 | } |
726a989a | 4413 | |
e21aff8a | 4414 | return false; |
6de9cd9a DN |
4415 | } |
4416 | ||
726a989a | 4417 | |
b8a00a4d JH |
4418 | /* Walk all basic blocks created after FIRST and try to fold every statement |
4419 | in the STATEMENTS pointer set. */ | |
726a989a | 4420 | |
b8a00a4d JH |
4421 | static void |
4422 | fold_marked_statements (int first, struct pointer_set_t *statements) | |
4423 | { | |
726a989a | 4424 | for (; first < n_basic_blocks; first++) |
b8a00a4d JH |
4425 | if (BASIC_BLOCK (first)) |
4426 | { | |
726a989a RB |
4427 | gimple_stmt_iterator gsi; |
4428 | ||
4429 | for (gsi = gsi_start_bb (BASIC_BLOCK (first)); | |
4430 | !gsi_end_p (gsi); | |
4431 | gsi_next (&gsi)) | |
4432 | if (pointer_set_contains (statements, gsi_stmt (gsi))) | |
9477eb38 | 4433 | { |
726a989a | 4434 | gimple old_stmt = gsi_stmt (gsi); |
4b685e14 | 4435 | tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0; |
2bafad93 | 4436 | |
44e10129 MM |
4437 | if (old_decl && DECL_BUILT_IN (old_decl)) |
4438 | { | |
4439 | /* Folding builtins can create multiple statements; |
4440 | we need to look at all of them. */ |
4441 | gimple_stmt_iterator i2 = gsi; | |
4442 | gsi_prev (&i2); | |
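| /* Remember the statement just before the one being folded, so |
| that afterwards everything between I2 and GSI is known to be |
| newly created. */ |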
4443 | if (fold_stmt (&gsi)) | |
4444 | { | |
4445 | gimple new_stmt; | |
a9d24544 JJ |
4446 | /* If a builtin at the end of a bb folded into nothing, |
4447 | the following loop won't work. */ | |
4448 | if (gsi_end_p (gsi)) | |
4449 | { | |
4450 | cgraph_update_edges_for_call_stmt (old_stmt, | |
4451 | old_decl, NULL); | |
4452 | break; | |
4453 | } | |
44e10129 MM |
4454 | if (gsi_end_p (i2)) |
4455 | i2 = gsi_start_bb (BASIC_BLOCK (first)); | |
4456 | else | |
4457 | gsi_next (&i2); | |
4458 | while (1) | |
4459 | { | |
4460 | new_stmt = gsi_stmt (i2); | |
4461 | update_stmt (new_stmt); | |
4462 | cgraph_update_edges_for_call_stmt (old_stmt, old_decl, | |
4463 | new_stmt); | |
4464 | ||
4465 | if (new_stmt == gsi_stmt (gsi)) | |
4466 | { | |
4467 | /* It is okay to check only for the very last |
4468 | of these statements. If it is a throwing |
4469 | statement nothing will change. If it isn't |
4470 | this can remove EH edges. The only way that |
4471 | could be wrong is if an intermediate statement |
4472 | threw while the last one did not; that would |
4473 | mean we'd have to split the block, which we |
4474 | can't do here and we'd lose anyway. And as |
4475 | builtins probably never throw, this all |
4476 | is moot anyway. */ |
4477 | if (maybe_clean_or_replace_eh_stmt (old_stmt, | |
4478 | new_stmt)) | |
4479 | gimple_purge_dead_eh_edges (BASIC_BLOCK (first)); | |
4480 | break; | |
4481 | } | |
4482 | gsi_next (&i2); | |
4483 | } | |
4484 | } | |
4485 | } | |
4486 | else if (fold_stmt (&gsi)) | |
9477eb38 | 4487 | { |
726a989a RB |
4488 | /* Re-read the statement from GSI as fold_stmt() may |
4489 | have changed it. */ | |
4490 | gimple new_stmt = gsi_stmt (gsi); | |
4491 | update_stmt (new_stmt); | |
4492 | ||
4b685e14 JH |
4493 | if (is_gimple_call (old_stmt) |
4494 | || is_gimple_call (new_stmt)) | |
44e10129 MM |
4495 | cgraph_update_edges_for_call_stmt (old_stmt, old_decl, |
4496 | new_stmt); | |
726a989a RB |
4497 | |
4498 | if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)) | |
4499 | gimple_purge_dead_eh_edges (BASIC_BLOCK (first)); | |
9477eb38 JH |
4500 | } |
4501 | } | |
b8a00a4d JH |
4502 | } |
4503 | } | |
4504 | ||
1084e689 JH |
4505 | /* Return true if BB has at least one abnormal outgoing edge. */ |
4506 | ||
4507 | static inline bool | |
4508 | has_abnormal_outgoing_edge_p (basic_block bb) | |
4509 | { | |
4510 | edge e; | |
4511 | edge_iterator ei; | |
4512 | ||
4513 | FOR_EACH_EDGE (e, ei, bb->succs) | |
4514 | if (e->flags & EDGE_ABNORMAL) | |
4515 | return true; | |
4516 | ||
4517 | return false; | |
4518 | } | |
4519 | ||
d4e4baa9 AO |
4520 | /* Expand calls to inline functions in the body of FN. */ |
4521 | ||
873aa8f5 | 4522 | unsigned int |
46c5ad27 | 4523 | optimize_inline_calls (tree fn) |
d4e4baa9 | 4524 | { |
1b369fae | 4525 | copy_body_data id; |
e21aff8a | 4526 | basic_block bb; |
b8a00a4d | 4527 | int last = n_basic_blocks; |
d406b663 | 4528 | struct gimplify_ctx gctx; |
5d7b099c | 4529 | bool inlined_p = false; |
d406b663 | 4530 | |
d4e4baa9 AO |
4531 | /* Clear out ID. */ |
4532 | memset (&id, 0, sizeof (id)); | |
4533 | ||
581985d7 | 4534 | id.src_node = id.dst_node = cgraph_get_node (fn); |
67348ccc | 4535 | gcc_assert (id.dst_node->definition); |
1b369fae | 4536 | id.dst_fn = fn; |
d4e4baa9 | 4537 | /* Or any functions that aren't finished yet. */ |
d4e4baa9 | 4538 | if (current_function_decl) |
0f900dfa | 4539 | id.dst_fn = current_function_decl; |
1b369fae RH |
4540 | |
4541 | id.copy_decl = copy_decl_maybe_to_var; | |
4542 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4543 | id.transform_new_cfg = false; | |
4544 | id.transform_return_to_modify = true; | |
78bbd765 | 4545 | id.transform_parameter = true; |
9ff420f1 | 4546 | id.transform_lang_insert_block = NULL; |
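| /* Statements touched by inlining are queued here and folded in |
| one pass by fold_marked_statements at the end. */ |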
b8a00a4d | 4547 | id.statements_to_fold = pointer_set_create (); |
1b369fae | 4548 | |
d406b663 | 4549 | push_gimplify_context (&gctx); |
d4e4baa9 | 4550 | |
672987e8 ZD |
4551 | /* We make no attempts to keep dominance info up-to-date. */ |
4552 | free_dominance_info (CDI_DOMINATORS); | |
4553 | free_dominance_info (CDI_POST_DOMINATORS); | |
4554 | ||
726a989a RB |
4555 | /* Register specific gimple functions. */ |
4556 | gimple_register_cfg_hooks (); | |
4557 | ||
e21aff8a SB |
4558 | /* Reach the trees by walking over the CFG, and note the |
4559 | enclosing basic-blocks in the call edges. */ | |
4560 | /* We walk the blocks going forward, because inlined function bodies | |
4561 | will split id->current_basic_block, and the new blocks will | |
4562 | follow it; we'll trudge through them, processing their CALL_EXPRs | |
4563 | along the way. */ | |
4564 | FOR_EACH_BB (bb) | |
5d7b099c | 4565 | inlined_p |= gimple_expand_calls_inline (bb, &id); |
d4e4baa9 | 4566 | |
e21aff8a | 4567 | pop_gimplify_context (NULL); |
6de9cd9a | 4568 | |
18c6ada9 JH |
4569 | #ifdef ENABLE_CHECKING |
4570 | { | |
4571 | struct cgraph_edge *e; | |
4572 | ||
1b369fae | 4573 | verify_cgraph_node (id.dst_node); |
18c6ada9 JH |
4574 | |
4575 | /* Double check that we inlined everything we are supposed to inline. */ | |
1b369fae | 4576 | for (e = id.dst_node->callees; e; e = e->next_callee) |
1e128c5f | 4577 | gcc_assert (e->inline_failed); |
18c6ada9 JH |
4578 | } |
4579 | #endif | |
b8698a0f | 4580 | |
5d7b099c | 4581 | /* Fold queued statements. */ |
a9eafe81 AP |
4582 | fold_marked_statements (last, id.statements_to_fold); |
4583 | pointer_set_destroy (id.statements_to_fold); | |
b8698a0f | 4584 | |
9771b263 | 4585 | gcc_assert (!id.debug_stmts.exists ()); |
b5b8b0ac | 4586 | |
5d7b099c RG |
4587 | /* If we didn't inline into the function there is nothing to do. */ |
4588 | if (!inlined_p) | |
4589 | return 0; | |
4590 | ||
a9eafe81 AP |
4591 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ |
4592 | number_blocks (fn); | |
b8a00a4d | 4593 | |
078c3644 JH |
4594 | delete_unreachable_blocks_update_callgraph (&id); |
4595 | #ifdef ENABLE_CHECKING | |
4596 | verify_cgraph_node (id.dst_node); | |
4597 | #endif | |
726a989a | 4598 | |
110cfe1c JH |
4599 | /* It would be nice to check SSA/CFG/statement consistency here, but it is |
4600 | not possible yet - the IPA passes might make various functions not |
4601 | throw and they don't care to proactively update local EH info. This is |
4602 | done later in the fixup_cfg pass, which also executes the verification. */ |
726a989a RB |
4603 | return (TODO_update_ssa |
4604 | | TODO_cleanup_cfg | |
45a80bb9 | 4605 | | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0) |
5d7b099c | 4606 | | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0) |
45a80bb9 | 4607 | | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0)); |
d4e4baa9 AO |
4608 | } |
4609 | ||
d4e4baa9 AO |
4610 | /* Passed to walk_tree. Copies the node pointed to, if appropriate. */ |
4611 | ||
4612 | tree | |
46c5ad27 | 4613 | copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
d4e4baa9 AO |
4614 | { |
4615 | enum tree_code code = TREE_CODE (*tp); | |
07beea0d | 4616 | enum tree_code_class cl = TREE_CODE_CLASS (code); |
d4e4baa9 AO |
4617 | |
4618 | /* We make copies of most nodes. */ | |
07beea0d | 4619 | if (IS_EXPR_CODE_CLASS (cl) |
d4e4baa9 AO |
4620 | || code == TREE_LIST |
4621 | || code == TREE_VEC | |
8843c120 DN |
4622 | || code == TYPE_DECL |
4623 | || code == OMP_CLAUSE) | |
d4e4baa9 AO |
4624 | { |
4625 | /* Because the chain gets clobbered when we make a copy, we save it | |
4626 | here. */ | |
82d6e6fc | 4627 | tree chain = NULL_TREE, new_tree; |
07beea0d | 4628 | |
81f653d6 NF |
4629 | if (CODE_CONTAINS_STRUCT (code, TS_COMMON)) |
4630 | chain = TREE_CHAIN (*tp); | |
d4e4baa9 AO |
4631 | |
4632 | /* Copy the node. */ | |
82d6e6fc | 4633 | new_tree = copy_node (*tp); |
6de9cd9a | 4634 | |
82d6e6fc | 4635 | *tp = new_tree; |
d4e4baa9 AO |
4636 | |
4637 | /* Now, restore the chain, if appropriate. That will cause | |
4638 | walk_tree to walk into the chain as well. */ | |
50674e96 DN |
4639 | if (code == PARM_DECL |
4640 | || code == TREE_LIST | |
aaf46ef9 | 4641 | || code == OMP_CLAUSE) |
d4e4baa9 AO |
4642 | TREE_CHAIN (*tp) = chain; |
4643 | ||
4644 | /* For now, we don't update BLOCKs when we make copies. So, we | |
6de9cd9a DN |
4645 | have to nullify all BIND_EXPRs. */ |
4646 | if (TREE_CODE (*tp) == BIND_EXPR) | |
4647 | BIND_EXPR_BLOCK (*tp) = NULL_TREE; | |
d4e4baa9 | 4648 | } |
4038c495 GB |
4649 | else if (code == CONSTRUCTOR) |
4650 | { | |
4651 | /* CONSTRUCTOR nodes need special handling because | |
4652 | we need to duplicate the vector of elements. */ | |
82d6e6fc | 4653 | tree new_tree; |
4038c495 | 4654 | |
82d6e6fc | 4655 | new_tree = copy_node (*tp); |
9771b263 | 4656 | CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp)); |
82d6e6fc | 4657 | *tp = new_tree; |
4038c495 | 4658 | } |
3533b943 | 4659 | else if (code == STATEMENT_LIST) |
deb5046b JM |
4660 | /* We used to just abort on STATEMENT_LIST, but we can run into them |
4661 | with statement-expressions (c++/40975). */ | |
4662 | copy_statement_list (tp); | |
6615c446 | 4663 | else if (TREE_CODE_CLASS (code) == tcc_type) |
d4e4baa9 | 4664 | *walk_subtrees = 0; |
6615c446 | 4665 | else if (TREE_CODE_CLASS (code) == tcc_declaration) |
6de9cd9a | 4666 | *walk_subtrees = 0; |
a396f8ae GK |
4667 | else if (TREE_CODE_CLASS (code) == tcc_constant) |
4668 | *walk_subtrees = 0; | |
d4e4baa9 AO |
4669 | return NULL_TREE; |
4670 | } | |
4671 | ||
4672 | /* The SAVE_EXPR pointed to by TP is being copied. If ST contains | |
aa4a53af | 4673 | information indicating to what new SAVE_EXPR this one should be mapped, |
e21aff8a SB |
4674 | use that one. Otherwise, create a new node and enter it in ST so |
4675 | that later copies of the same SAVE_EXPR reuse it. */ |
d4e4baa9 | 4676 | |
892c7e1e | 4677 | static void |
82c82743 | 4678 | remap_save_expr (tree *tp, void *st_, int *walk_subtrees) |
d4e4baa9 | 4679 | { |
6be42dd4 RG |
4680 | struct pointer_map_t *st = (struct pointer_map_t *) st_; |
4681 | tree *n; | |
5e20bdd7 | 4682 | tree t; |
d4e4baa9 AO |
4683 | |
4684 | /* See if we already encountered this SAVE_EXPR. */ | |
6be42dd4 | 4685 | n = (tree *) pointer_map_contains (st, *tp); |
d92b4486 | 4686 | |
d4e4baa9 AO |
4687 | /* If we didn't already remap this SAVE_EXPR, do so now. */ |
4688 | if (!n) | |
4689 | { | |
5e20bdd7 | 4690 | t = copy_node (*tp); |
d4e4baa9 | 4691 | |
d4e4baa9 | 4692 | /* Remember this SAVE_EXPR. */ |
6be42dd4 | 4693 | *pointer_map_insert (st, *tp) = t; |
350ebd54 | 4694 | /* Make sure we don't remap an already-remapped SAVE_EXPR. */ |
6be42dd4 | 4695 | *pointer_map_insert (st, t) = t; |
d4e4baa9 AO |
4696 | } |
4697 | else | |
5e20bdd7 JZ |
4698 | { |
4699 | /* We've already walked into this SAVE_EXPR; don't do it again. */ | |
4700 | *walk_subtrees = 0; | |
6be42dd4 | 4701 | t = *n; |
5e20bdd7 | 4702 | } |
d4e4baa9 AO |
4703 | |
4704 | /* Replace this SAVE_EXPR with the copy. */ | |
5e20bdd7 | 4705 | *tp = t; |
d4e4baa9 | 4706 | } |
d436bff8 | 4707 | |
726a989a RB |
4708 | /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local |
4709 | label, copies the declaration and enters it in the decl map of the |
4710 | copy_body_data passed in WI->info. */ |
4711 | ||
4712 | static tree | |
4713 | mark_local_labels_stmt (gimple_stmt_iterator *gsip, | |
4714 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4715 | struct walk_stmt_info *wi) | |
4716 | { | |
4717 | copy_body_data *id = (copy_body_data *) wi->info; | |
4718 | gimple stmt = gsi_stmt (*gsip); | |
4719 | ||
4720 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
4721 | { | |
4722 | tree decl = gimple_label_label (stmt); | |
4723 | ||
4724 | /* Copy the decl and remember the copy. */ | |
4725 | insert_decl_map (id, decl, id->copy_decl (decl, id)); | |
4726 | } | |
4727 | ||
4728 | return NULL_TREE; | |
4729 | } | |
4730 | ||
4731 | ||
4732 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals. |
4733 | Using the decl map stored in ID->decl_map, |
4734 | remaps all local declarations to appropriate replacements in gimple | |
4735 | operands. */ | |
4736 | ||
4737 | static tree | |
4738 | replace_locals_op (tree *tp, int *walk_subtrees, void *data) | |
4739 | { | |
4740 | struct walk_stmt_info *wi = (struct walk_stmt_info*) data; | |
4741 | copy_body_data *id = (copy_body_data *) wi->info; | |
4742 | struct pointer_map_t *st = id->decl_map; | |
4743 | tree *n; | |
4744 | tree expr = *tp; | |
4745 | ||
4746 | /* Only a local declaration (variable or label). */ | |
4747 | if ((TREE_CODE (expr) == VAR_DECL | |
4748 | && !TREE_STATIC (expr)) | |
4749 | || TREE_CODE (expr) == LABEL_DECL) | |
4750 | { | |
4751 | /* Lookup the declaration. */ | |
4752 | n = (tree *) pointer_map_contains (st, expr); | |
4753 | ||
4754 | /* If it's there, remap it. */ | |
4755 | if (n) | |
4756 | *tp = *n; | |
4757 | *walk_subtrees = 0; | |
4758 | } | |
4759 | else if (TREE_CODE (expr) == STATEMENT_LIST | |
4760 | || TREE_CODE (expr) == BIND_EXPR | |
4761 | || TREE_CODE (expr) == SAVE_EXPR) | |
4762 | gcc_unreachable (); | |
4763 | else if (TREE_CODE (expr) == TARGET_EXPR) | |
4764 | { | |
4765 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
4766 | It's OK for this to happen if it was part of a subtree that | |
4767 | isn't immediately expanded, such as operand 2 of another | |
4768 | TARGET_EXPR. */ | |
4769 | if (!TREE_OPERAND (expr, 1)) | |
4770 | { | |
4771 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
4772 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
4773 | } | |
4774 | } | |
4775 | ||
4776 | /* Keep iterating. */ | |
4777 | return NULL_TREE; | |
4778 | } | |
4779 | ||
4780 | ||
4781 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals. |
4782 | Using the decl map stored in ID->decl_map, |
4783 | remaps all local declarations to appropriate replacements in gimple | |
4784 | statements. */ | |
4785 | ||
4786 | static tree | |
4787 | replace_locals_stmt (gimple_stmt_iterator *gsip, | |
4788 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4789 | struct walk_stmt_info *wi) | |
4790 | { | |
4791 | copy_body_data *id = (copy_body_data *) wi->info; | |
4792 | gimple stmt = gsi_stmt (*gsip); | |
4793 | ||
4794 | if (gimple_code (stmt) == GIMPLE_BIND) | |
4795 | { | |
4796 | tree block = gimple_bind_block (stmt); | |
4797 | ||
4798 | if (block) | |
4799 | { | |
4800 | remap_block (&block, id); | |
4801 | gimple_bind_set_block (stmt, block); | |
4802 | } | |
4803 | ||
4804 | /* This will remap a lot of the same decls again, but this should be | |
4805 | harmless. */ | |
4806 | if (gimple_bind_vars (stmt)) | |
9771b263 DN |
4807 | gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), |
4808 | NULL, id)); | |
726a989a RB |
4809 | } |
4810 | ||
4811 | /* Keep iterating. */ | |
4812 | return NULL_TREE; | |
4813 | } | |
4814 | ||
4815 | ||
4816 | /* Copies everything in SEQ and replaces variables and labels local to | |
4817 | current_function_decl. */ | |
4818 | ||
4819 | gimple_seq | |
4820 | copy_gimple_seq_and_replace_locals (gimple_seq seq) | |
4821 | { | |
4822 | copy_body_data id; | |
4823 | struct walk_stmt_info wi; | |
4824 | struct pointer_set_t *visited; | |
4825 | gimple_seq copy; | |
4826 | ||
4827 | /* There's nothing to do for an empty sequence. */ |
4828 | if (seq == NULL) | |
4829 | return seq; | |
4830 | ||
4831 | /* Set up ID. */ | |
4832 | memset (&id, 0, sizeof (id)); | |
4833 | id.src_fn = current_function_decl; | |
4834 | id.dst_fn = current_function_decl; | |
4835 | id.decl_map = pointer_map_create (); | |
b5b8b0ac | 4836 | id.debug_map = NULL; |
726a989a RB |
4837 | |
4838 | id.copy_decl = copy_decl_no_change; | |
4839 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4840 | id.transform_new_cfg = false; | |
4841 | id.transform_return_to_modify = false; | |
78bbd765 | 4842 | id.transform_parameter = false; |
726a989a RB |
4843 | id.transform_lang_insert_block = NULL; |
4844 | ||
4845 | /* Walk the tree once to find local labels. */ | |
4846 | memset (&wi, 0, sizeof (wi)); | |
4847 | visited = pointer_set_create (); | |
4848 | wi.info = &id; | |
4849 | wi.pset = visited; | |
4850 | walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi); | |
4851 | pointer_set_destroy (visited); | |
4852 | ||
4853 | copy = gimple_seq_copy (seq); | |
4854 | ||
4855 | /* Walk the copy, remapping decls. */ | |
4856 | memset (&wi, 0, sizeof (wi)); | |
4857 | wi.info = &id; | |
4858 | walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi); | |
4859 | ||
4860 | /* Clean up. */ | |
4861 | pointer_map_destroy (id.decl_map); | |
b5b8b0ac AO |
4862 | if (id.debug_map) |
4863 | pointer_map_destroy (id.debug_map); | |
726a989a RB |
4864 | |
4865 | return copy; | |
4866 | } | |
4867 | ||
4868 | ||
6de9cd9a | 4869 | /* Allow someone to determine if SEARCH is a child of TOP from gdb. */ |
aa4a53af | 4870 | |
6de9cd9a DN |
4871 | static tree |
4872 | debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data) | |
4873 | { | |
4874 | if (*tp == data) | |
4875 | return (tree) data; | |
4876 | else | |
4877 | return NULL; | |
4878 | } | |
4879 | ||
24e47c76 | 4880 | DEBUG_FUNCTION bool |
6de9cd9a DN |
4881 | debug_find_tree (tree top, tree search) |
4882 | { | |
4883 | return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0; | |
4884 | } | |
4885 | ||
e21aff8a | 4886 | |
6de9cd9a DN |
4887 | /* Declare the variables created by the inliner. Add all the variables in |
4888 | VARS to the current function, chaining them into BLOCK if it is non-null. */ |
4889 | ||
4890 | static void | |
e21aff8a | 4891 | declare_inline_vars (tree block, tree vars) |
6de9cd9a | 4892 | { |
84936f6f | 4893 | tree t; |
910ad8de | 4894 | for (t = vars; t; t = DECL_CHAIN (t)) |
9659ce8b JH |
4895 | { |
4896 | DECL_SEEN_IN_BIND_EXPR_P (t) = 1; | |
4897 | gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t)); | |
c021f10b | 4898 | add_local_decl (cfun, t); |
9659ce8b | 4899 | } |
6de9cd9a | 4900 | |
e21aff8a SB |
4901 | if (block) |
4902 | BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars); | |
4903 | } | |
4904 | ||
19734dd8 | 4905 | /* Finish up the copy COPY of declaration DECL. The DECL originally was in |
1b369fae RH |
4906 | ID->src_fn, but it will now live in ID->dst_fn; fix up its context, |
4907 | RTL and debug-related flags accordingly. */ |
19734dd8 | 4908 | |
1b369fae RH |
4909 | static tree |
4910 | copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy) | |
19734dd8 | 4911 | { |
19734dd8 RL |
4912 | /* Don't generate debug information for the copy if we wouldn't have |
4913 | generated it for the original. */ |
4914 | DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl); | |
4915 | DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl); | |
4916 | ||
4917 | /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what | |
b8698a0f | 4918 | declaration inspired this copy. */ |
19734dd8 RL |
4919 | DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl); |
4920 | ||
4921 | /* The new variable/label has no RTL, yet. */ | |
68a976f2 RL |
4922 | if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL) |
4923 | && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy)) | |
2eb79bbb | 4924 | SET_DECL_RTL (copy, 0); |
b8698a0f | 4925 | |
19734dd8 RL |
4926 | /* These args would always appear unused, if not for this. */ |
4927 | TREE_USED (copy) = 1; | |
4928 | ||
4929 | /* Set the context for the new declaration. */ | |
4930 | if (!DECL_CONTEXT (decl)) | |
4931 | /* Globals stay global. */ | |
4932 | ; | |
1b369fae | 4933 | else if (DECL_CONTEXT (decl) != id->src_fn) |
19734dd8 RL |
4934 | /* Things that weren't in the scope of the function we're inlining |
4935 | from aren't in the scope we're inlining to, either. */ | |
4936 | ; | |
4937 | else if (TREE_STATIC (decl)) | |
4938 | /* Function-scoped static variables should stay in the original | |
4939 | function. */ | |
4940 | ; | |
4941 | else | |
4942 | /* Ordinary automatic local variables are now in the scope of the | |
4943 | new function. */ | |
1b369fae | 4944 | DECL_CONTEXT (copy) = id->dst_fn; |
19734dd8 RL |
4945 | |
4946 | return copy; | |
4947 | } | |
4948 | ||
1b369fae RH |
4949 | static tree |
4950 | copy_decl_to_var (tree decl, copy_body_data *id) | |
4951 | { | |
4952 | tree copy, type; | |
4953 | ||
4954 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4955 | || TREE_CODE (decl) == RESULT_DECL); | |
4956 | ||
4957 | type = TREE_TYPE (decl); | |
4958 | ||
c2255bc4 AH |
4959 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4960 | VAR_DECL, DECL_NAME (decl), type); | |
25a6a873 RG |
4961 | if (DECL_PT_UID_SET_P (decl)) |
4962 | SET_DECL_PT_UID (copy, DECL_PT_UID (decl)); | |
1b369fae RH |
4963 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); |
4964 | TREE_READONLY (copy) = TREE_READONLY (decl); | |
4965 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
0890b981 | 4966 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
1b369fae RH |
4967 | |
4968 | return copy_decl_for_dup_finish (id, decl, copy); | |
4969 | } | |
4970 | ||
c08cd4c1 JM |
4971 | /* Like copy_decl_to_var, but create a return slot object instead of a |
4972 | pointer variable for return by invisible reference. */ | |
4973 | ||
4974 | static tree | |
4975 | copy_result_decl_to_var (tree decl, copy_body_data *id) | |
4976 | { | |
4977 | tree copy, type; | |
4978 | ||
4979 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4980 | || TREE_CODE (decl) == RESULT_DECL); | |
4981 | ||
4982 | type = TREE_TYPE (decl); | |
4983 | if (DECL_BY_REFERENCE (decl)) | |
4984 | type = TREE_TYPE (type); | |
4985 | ||
c2255bc4 AH |
4986 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4987 | VAR_DECL, DECL_NAME (decl), type); | |
25a6a873 RG |
4988 | if (DECL_PT_UID_SET_P (decl)) |
4989 | SET_DECL_PT_UID (copy, DECL_PT_UID (decl)); | |
c08cd4c1 JM |
4990 | TREE_READONLY (copy) = TREE_READONLY (decl); |
4991 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
4992 | if (!DECL_BY_REFERENCE (decl)) | |
4993 | { | |
4994 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); | |
0890b981 | 4995 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
c08cd4c1 JM |
4996 | } |
4997 | ||
4998 | return copy_decl_for_dup_finish (id, decl, copy); | |
4999 | } | |
5000 | ||
9ff420f1 | 5001 | tree |
1b369fae RH |
5002 | copy_decl_no_change (tree decl, copy_body_data *id) |
5003 | { | |
5004 | tree copy; | |
5005 | ||
5006 | copy = copy_node (decl); | |
5007 | ||
5008 | /* The COPY is not abstract; it will be generated in DST_FN. */ | |
5009 | DECL_ABSTRACT (copy) = 0; | |
5010 | lang_hooks.dup_lang_specific_decl (copy); | |
5011 | ||
5012 | /* TREE_ADDRESSABLE isn't used to indicate that a label's address has | |
5013 | been taken; it's for internal bookkeeping in expand_goto_internal. */ | |
5014 | if (TREE_CODE (copy) == LABEL_DECL) | |
5015 | { | |
5016 | TREE_ADDRESSABLE (copy) = 0; | |
5017 | LABEL_DECL_UID (copy) = -1; | |
5018 | } | |
5019 | ||
5020 | return copy_decl_for_dup_finish (id, decl, copy); | |
5021 | } | |
5022 | ||
5023 | static tree | |
5024 | copy_decl_maybe_to_var (tree decl, copy_body_data *id) | |
5025 | { | |
5026 | if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL) | |
5027 | return copy_decl_to_var (decl, id); | |
5028 | else | |
5029 | return copy_decl_no_change (decl, id); | |
5030 | } | |
5031 | ||
19734dd8 RL |
5032 | /* Return a copy of the function's argument tree, omitting parameters in ARGS_TO_SKIP. */ |
5033 | static tree | |
c6f7cfc1 JH |
5034 | copy_arguments_for_versioning (tree orig_parm, copy_body_data * id, |
5035 | bitmap args_to_skip, tree *vars) | |
19734dd8 | 5036 | { |
c6f7cfc1 JH |
5037 | tree arg, *parg; |
5038 | tree new_parm = NULL; | |
5039 | int i = 0; | |
19734dd8 | 5040 | |
c6f7cfc1 JH |
5041 | parg = &new_parm; |
5042 | ||
910ad8de | 5043 | for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++) |
c6f7cfc1 JH |
5044 | if (!args_to_skip || !bitmap_bit_p (args_to_skip, i)) |
5045 | { | |
5046 | tree new_tree = remap_decl (arg, id); | |
d7da5cc8 MJ |
5047 | if (TREE_CODE (new_tree) != PARM_DECL) |
5048 | new_tree = id->copy_decl (arg, id); | |
c6f7cfc1 JH |
5049 | lang_hooks.dup_lang_specific_decl (new_tree); |
5050 | *parg = new_tree; | |
910ad8de | 5051 | parg = &DECL_CHAIN (new_tree); |
c6f7cfc1 | 5052 | } |
eb50f5f4 | 5053 | else if (!pointer_map_contains (id->decl_map, arg)) |
c6f7cfc1 JH |
5054 | { |
5055 | /* Make an equivalent VAR_DECL. If the argument was used | |
5056 | as temporary variable later in function, the uses will be | |
5057 | replaced by local variable. */ | |
5058 | tree var = copy_decl_to_var (arg, id); | |
c6f7cfc1 JH |
5059 | insert_decl_map (id, arg, var); |
5060 | /* Declare this new variable. */ | |
910ad8de | 5061 | DECL_CHAIN (var) = *vars; |
c6f7cfc1 JH |
5062 | *vars = var; |
5063 | } | |
5064 | return new_parm; | |
19734dd8 RL |
5065 | } |
5066 | ||
5067 | /* Return a copy of the function's static chain. */ | |
5068 | static tree | |
1b369fae | 5069 | copy_static_chain (tree static_chain, copy_body_data * id) |
19734dd8 RL |
5070 | { |
5071 | tree *chain_copy, *pvar; | |
5072 | ||
5073 | chain_copy = &static_chain; | |
910ad8de | 5074 | for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar)) |
19734dd8 | 5075 | { |
82d6e6fc KG |
5076 | tree new_tree = remap_decl (*pvar, id); |
5077 | lang_hooks.dup_lang_specific_decl (new_tree); | |
910ad8de | 5078 | DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar); |
82d6e6fc | 5079 | *pvar = new_tree; |
19734dd8 RL |
5080 | } |
5081 | return static_chain; | |
5082 | } | |
5083 | ||
5084 | /* Return true if the function is allowed to be versioned. | |
5085 | This is a guard for the versioning functionality. */ | |
27dbd3ac | 5086 | |
19734dd8 RL |
5087 | bool |
5088 | tree_versionable_function_p (tree fndecl) | |
5089 | { | |
86631ea3 MJ |
5090 | return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl)) |
5091 | && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL); | |
19734dd8 RL |
5092 | } |
5093 | ||
9187e02d JH |
5094 | /* Delete all unreachable basic blocks and update callgraph. |
5095 | Doing so is somewhat nontrivial because we need to update all clones and | |
5096 | remove inline functions that become unreachable. */ |
9f5e9983 | 5097 | |
9187e02d JH |
5098 | static bool |
5099 | delete_unreachable_blocks_update_callgraph (copy_body_data *id) | |
9f5e9983 | 5100 | { |
9187e02d JH |
5101 | bool changed = false; |
5102 | basic_block b, next_bb; | |
5103 | ||
5104 | find_unreachable_blocks (); | |
5105 | ||
5106 | /* Delete all unreachable basic blocks. */ | |
5107 | ||
5108 | for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb) | |
5109 | { | |
5110 | next_bb = b->next_bb; | |
5111 | ||
5112 | if (!(b->flags & BB_REACHABLE)) | |
5113 | { | |
5114 | gimple_stmt_iterator bsi; | |
5115 | ||
5116 | for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi)) | |
042ae7d2 JH |
5117 | { |
5118 | struct cgraph_edge *e; | |
5119 | struct cgraph_node *node; | |
9187e02d | 5120 | |
67348ccc | 5121 | ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi)); |
042ae7d2 JH |
5122 | |
5123 | if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL | |
5124 | &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL) | |
5125 | { | |
5126 | if (!e->inline_failed) | |
5127 | cgraph_remove_node_and_inline_clones (e->callee, id->dst_node); | |
5128 | else | |
5129 | cgraph_remove_edge (e); | |
5130 | } | |
5131 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES | |
5132 | && id->dst_node->clones) | |
5133 | for (node = id->dst_node->clones; node != id->dst_node;) | |
9187e02d | 5134 | { |
67348ccc | 5135 | ipa_remove_stmt_references (node, gsi_stmt (bsi)); |
042ae7d2 JH |
5136 | if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL |
5137 | && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL) | |
5138 | { | |
5139 | if (!e->inline_failed) | |
5140 | cgraph_remove_node_and_inline_clones (e->callee, id->dst_node); | |
5141 | else | |
5142 | cgraph_remove_edge (e); | |
5143 | } | |
5144 | ||
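| /* Step to the next clone in preorder: children first, then |
| siblings, then back up toward ID->dst_node. */ |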
5145 | if (node->clones) | |
5146 | node = node->clones; | |
5147 | else if (node->next_sibling_clone) | |
5148 | node = node->next_sibling_clone; | |
9187e02d | 5149 | else |
042ae7d2 JH |
5150 | { |
5151 | while (node != id->dst_node && !node->next_sibling_clone) | |
5152 | node = node->clone_of; | |
5153 | if (node != id->dst_node) | |
5154 | node = node->next_sibling_clone; | |
5155 | } | |
9187e02d | 5156 | } |
042ae7d2 | 5157 | } |
9187e02d JH |
5158 | delete_basic_block (b); |
5159 | changed = true; | |
5160 | } | |
5161 | } | |
5162 | ||
9187e02d | 5163 | return changed; |
9f5e9983 JJ |
5164 | } |
5165 | ||
08ad1d6d JH |
5166 | /* Update clone info after duplication. */ |
5167 | ||
5168 | static void | |
5169 | update_clone_info (copy_body_data * id) | |
5170 | { | |
5171 | struct cgraph_node *node; | |
5172 | if (!id->dst_node->clones) | |
5173 | return; | |
5174 | for (node = id->dst_node->clones; node != id->dst_node;) | |
5175 | { | |
5176 | /* First update replace maps to match the new body. */ | |
5177 | if (node->clone.tree_map) | |
5178 | { | |
5179 | unsigned int i; | |
9771b263 | 5180 | for (i = 0; i < vec_safe_length (node->clone.tree_map); i++) |
08ad1d6d JH |
5181 | { |
5182 | struct ipa_replace_map *replace_info; | |
9771b263 | 5183 | replace_info = (*node->clone.tree_map)[i]; |
08ad1d6d JH |
5184 | walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL); |
5185 | walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL); | |
5186 | } | |
5187 | } | |
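| /* Advance through the clone tree in the same preorder fashion as |
| in delete_unreachable_blocks_update_callgraph above. */ |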
5188 | if (node->clones) | |
5189 | node = node->clones; | |
5190 | else if (node->next_sibling_clone) | |
5191 | node = node->next_sibling_clone; | |
5192 | else | |
5193 | { | |
5194 | while (node != id->dst_node && !node->next_sibling_clone) | |
5195 | node = node->clone_of; | |
5196 | if (node != id->dst_node) | |
5197 | node = node->next_sibling_clone; | |
5198 | } | |
5199 | } | |
5200 | } | |
5201 | ||
19734dd8 RL |
5202 | /* Create a copy of a function's tree. |
5203 | OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes | |
5204 | of the original function and the new copied function | |
b8698a0f L |
5205 | respectively. In case we want to replace a DECL |
5206 | tree with another tree while duplicating the function's | |
5207 | body, TREE_MAP represents the mapping between these | |
ea99e0be | 5208 | trees. If UPDATE_CLONES is set, the call_stmt fields |
91382288 JH |
5209 | of edges of clones of the function will be updated. |
5210 | ||
5211 | If non-NULL, ARGS_TO_SKIP determines which function parameters to |
5212 | remove from the new version. |
1a2c27e9 | 5213 | If SKIP_RETURN is true, the new version will return void. |
91382288 JH |
5214 | If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy. |
5215 | If non-NULL, NEW_ENTRY determines the new entry BB of the clone. |
5216 | */ | |
19734dd8 | 5217 | void |
27dbd3ac | 5218 | tree_function_versioning (tree old_decl, tree new_decl, |
9771b263 | 5219 | vec<ipa_replace_map_p, va_gc> *tree_map, |
91382288 | 5220 | bool update_clones, bitmap args_to_skip, |
1a2c27e9 EB |
5221 | bool skip_return, bitmap blocks_to_copy, |
5222 | basic_block new_entry) | |
19734dd8 RL |
5223 | { |
5224 | struct cgraph_node *old_version_node; | |
5225 | struct cgraph_node *new_version_node; | |
1b369fae | 5226 | copy_body_data id; |
110cfe1c | 5227 | tree p; |
19734dd8 RL |
5228 | unsigned i; |
5229 | struct ipa_replace_map *replace_info; | |
b5b8b0ac | 5230 | basic_block old_entry_block, bb; |
07687835 | 5231 | stack_vec<gimple, 10> init_stmts; |
0f1961a2 | 5232 | tree vars = NULL_TREE; |
19734dd8 RL |
5233 | |
5234 | gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL | |
5235 | && TREE_CODE (new_decl) == FUNCTION_DECL); | |
5236 | DECL_POSSIBLY_INLINED (old_decl) = 1; | |
5237 | ||
fe660d7b MJ |
5238 | old_version_node = cgraph_get_node (old_decl); |
5239 | gcc_checking_assert (old_version_node); | |
5240 | new_version_node = cgraph_get_node (new_decl); | |
5241 | gcc_checking_assert (new_version_node); | |
19734dd8 | 5242 | |
ddb555ed JJ |
5243 | /* Copy over debug args. */ |
5244 | if (DECL_HAS_DEBUG_ARGS_P (old_decl)) | |
5245 | { | |
9771b263 | 5246 | vec<tree, va_gc> **new_debug_args, **old_debug_args; |
ddb555ed JJ |
5247 | gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL); |
5248 | DECL_HAS_DEBUG_ARGS_P (new_decl) = 0; | |
5249 | old_debug_args = decl_debug_args_lookup (old_decl); | |
5250 | if (old_debug_args) | |
5251 | { | |
5252 | new_debug_args = decl_debug_args_insert (new_decl); | |
9771b263 | 5253 | *new_debug_args = vec_safe_copy (*old_debug_args); |
ddb555ed JJ |
5254 | } |
5255 | } | |
5256 | ||
a3aadcc5 JH |
5257 | /* Output the inlining info for this abstract function, since it has been |
5258 | inlined. If we don't do this now, we can lose the information about the | |
5259 | variables in the function when the blocks get blown away as soon as we | |
5260 | remove the cgraph node. */ | |
5261 | (*debug_hooks->outlining_inline_function) (old_decl); | |
5262 | ||
19734dd8 RL |
5263 | DECL_ARTIFICIAL (new_decl) = 1; |
5264 | DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl); | |
c0c123ef JH |
5265 | if (DECL_ORIGIN (old_decl) == old_decl) |
5266 | old_version_node->used_as_abstract_origin = true; | |
f9417da1 | 5267 | DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl); |
19734dd8 | 5268 | |
3d283195 JH |
5269 | /* Prepare the data structures for the tree copy. */ |
5270 | memset (&id, 0, sizeof (id)); | |
5271 | ||
19734dd8 | 5272 | /* Prepare to record statements that need folding after the copy. */ |
9187e02d | 5273 | id.statements_to_fold = pointer_set_create (); |
b5b8b0ac | 5274 | |
6be42dd4 | 5275 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 5276 | id.debug_map = NULL; |
1b369fae RH |
5277 | id.src_fn = old_decl; |
5278 | id.dst_fn = new_decl; | |
5279 | id.src_node = old_version_node; | |
5280 | id.dst_node = new_version_node; | |
5281 | id.src_cfun = DECL_STRUCT_FUNCTION (old_decl); | |
4029a5e0 | 5282 | id.blocks_to_copy = blocks_to_copy; |
9771b263 | 5283 | if (id.src_node->ipa_transforms_to_apply.exists ()) |
0e3776db | 5284 | { |
9771b263 DN |
5285 | vec<ipa_opt_pass> old_transforms_to_apply |
5286 | = id.dst_node->ipa_transforms_to_apply; | |
0e3776db JH |
5287 | unsigned int i; |
5288 | ||
9771b263 DN |
5289 | id.dst_node->ipa_transforms_to_apply |
5290 | = id.src_node->ipa_transforms_to_apply.copy (); | |
5291 | for (i = 0; i < old_transforms_to_apply.length (); i++) | |
5292 | id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]); | |
5293 | old_transforms_to_apply.release (); | |
0e3776db | 5294 | } |
b8698a0f | 5295 | |
1b369fae RH |
5296 | id.copy_decl = copy_decl_no_change; |
5297 | id.transform_call_graph_edges | |
5298 | = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE; | |
5299 | id.transform_new_cfg = true; | |
5300 | id.transform_return_to_modify = false; | |
78bbd765 | 5301 | id.transform_parameter = false; |
9ff420f1 | 5302 | id.transform_lang_insert_block = NULL; |
1b369fae | 5303 | |
110cfe1c JH |
5304 | old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION |
5305 | (DECL_STRUCT_FUNCTION (old_decl)); | |
c0c123ef JH |
5306 | DECL_RESULT (new_decl) = DECL_RESULT (old_decl); |
5307 | DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl); | |
110cfe1c | 5308 | initialize_cfun (new_decl, old_decl, |
0d63a740 | 5309 | old_entry_block->count); |
1755aad0 RG |
5310 | DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta |
5311 | = id.src_cfun->gimple_df->ipa_pta; | |
b8698a0f | 5312 | |
19734dd8 RL |
5313 | /* Copy the function's static chain. */ |
5314 | p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl; | |
5315 | if (p) | |
5316 | DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl = | |
5317 | copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl, | |
5318 | &id); | |
b8698a0f | 5319 | |
19734dd8 RL |
5320 | /* If there's a tree_map, prepare for substitution. */ |
5321 | if (tree_map) | |
9771b263 | 5322 | for (i = 0; i < tree_map->length (); i++) |
19734dd8 | 5323 | { |
0f1961a2 | 5324 | gimple init; |
9771b263 | 5325 | replace_info = (*tree_map)[i]; |
1b369fae | 5326 | if (replace_info->replace_p) |
00fc2333 | 5327 | { |
922f15c2 JH |
5328 | if (!replace_info->old_tree) |
5329 | { | |
5330 | int i = replace_info->parm_num; | |
5331 | tree parm; | |
0e8853ee JH |
5332 | tree req_type; |
5333 | ||
910ad8de | 5334 | for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm)) |
922f15c2 JH |
5335 | i --; |
5336 | replace_info->old_tree = parm; | |
0e8853ee JH |
5337 | req_type = TREE_TYPE (parm); |
5338 | if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree))) | |
5339 | { | |
5340 | if (fold_convertible_p (req_type, replace_info->new_tree)) | |
5341 | replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree); | |
5342 | else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree))) | |
5343 | replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree); | |
5344 | else | |
5345 | { | |
5346 | if (dump_file) | |
5347 | { | |
5348 | fprintf (dump_file, " const "); | |
5349 | print_generic_expr (dump_file, replace_info->new_tree, 0); | |
5350 | fprintf (dump_file, " can't be converted to param "); | |
5351 | print_generic_expr (dump_file, parm, 0); | |
5352 | fprintf (dump_file, "\n"); | |
5353 | } | |
5354 | replace_info->old_tree = NULL; | |
5355 | } | |
5356 | } | |
5357 | } | |
5358 | else | |
5359 | gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL); | |
5360 | if (replace_info->old_tree) | |
5361 | { | |
5362 | init = setup_one_parameter (&id, replace_info->old_tree, | |
5363 | replace_info->new_tree, id.src_fn, | |
5364 | NULL, | |
5365 | &vars); | |
5366 | if (init) | |
5367 | init_stmts.safe_push (init); | |
922f15c2 | 5368 | } |
00fc2333 | 5369 | } |
19734dd8 | 5370 | } |

  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
        }
    }
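
  /* In the by-reference case above, the source body may refer to the SSA
     default definition of its RESULT_DECL; seeding the decl map with a
     fresh default definition for the new RESULT_DECL lets copy_body
     rewrite those uses directly.  */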

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }
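
  /* Clearing PROP_loops before loop_optimizer_init gives the new function
     an empty loop tree; the body copy below can then populate it with
     duplicates of the source function's loop structures.  */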

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
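
  /* Passing REG_BR_PROB_BASE as the frequency scale keeps the copied
     blocks' source frequencies unscaled; partial clones are rescaled
     further down once the new entry block is known.  */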

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
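  /* The statements queued while processing the tree_map initialize each
     replaced parameter with its replacement value.  */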
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);
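
  /* cfun is the new function at this point (initialize_cfun made it
     current); fields seeded from the source function may still refer
     to the old decls and need remapping.  */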

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_rebuild_references ();
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies so that they
     stay within the proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to
   integrate the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        *pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
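
/* For illustration (hypothetical caller, not from this file): a front end
   holding a call it built itself, e.g.

     tree call = build_call_expr (fndecl, 1, arg);
     tree body = maybe_inline_call_in_expr (call);

   can substitute BODY for the call when it is non-NULL, and must keep the
   original CALL_EXPR otherwise.  Inlining succeeds only for a const callee
   whose saved GENERIC body reduces to a single MODIFY_EXPR of the return
   value.  */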

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  TYPE_CANONICAL (type) = type;

  return type;
}
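
/* Since TYPE_CANONICAL of the copy points to the copy itself, the result
   is a distinct type for type-identity purposes: it can be modified, for
   instance given different attributes, without affecting the original
   type or anything already using it.  */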