/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

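/* As a rough illustration of the inlining transformation (a sketch
   with invented names, not taken from any particular test case):
   inlining

       int inc (int x) { return x + 1; }

   at a call site "y = inc (3);" remaps the PARM_DECL "x" to a local
   VAR_DECL initialized from the argument, and turns the RETURN_EXPR
   into an assignment to the dedicated returned-value variable:

       x.1 = 3;
       retval.2 = x.1 + 1;
       y = retval.2;  */
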
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this mapping is used for more
   than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

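/* For example (hypothetical names): after remap_decl records the
   mapping PARM_DECL "x" -> VAR_DECL "x.1", the identity entry
   "x.1" -> "x.1" inserted above makes a later lookup of the copy
   return the copy itself, instead of manufacturing a second copy.  */
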
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  /* Do not set DEF_STMT yet, as the statement has not been copied yet.
     We do that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI
     node in case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function that has an uninitialized variable,
             we might extend its lifetime (the variable might get
             reused).  This causes an ICE if we end up extending the
             lifetime of an SSA name across an abnormal edge, and it
             also increases register pressure.

             We simply initialize all uninitialized vars by 0, except
             when we are inlining to the very first BB.  We can avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

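/* A sketch of the zero-initialization above (invented names): if the
   inlined body reads an uninitialized local "tmp", its copy receives
   an explicit

       tmp.3 = 0;

   at the end of id->entry_bb, so the new SSA name has a real defining
   statement whose lifetime cannot leak across abnormal edges.  */
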
/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
int processing_debug_stmt = 0;

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* For fields, do likewise for the offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME
                  && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a
     pointer or reference type, remap the designated type and make a
     new pointer or reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

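/* A sketch of when type remapping actually fires (hypothetical code):
   for a C99 VLA such as

       void f (int n) { int a[n]; ... }

   the type of "a" is variably modified: its domain bounds refer to an
   expression involving "n", so inlining f must build a fresh
   ARRAY_TYPE whose bounds use the caller's copy of "n".  A plain
   "int a[10]" is not variably modified and is simply mapped to
   itself.  */
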
/* Return the previously remapped type of TYPE in ID.  Return NULL if
   TYPE is NULL or if TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in *n, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into
         referenced vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

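/* A sketch of the *& simplification above (hypothetical code): when

       void g (int *p) { *p = 1; }

   is inlined at the call "g (&x)", the parameter "p" is mapped to the
   ADDR_EXPR "&x", so the INDIRECT_REF "*p" would literally become
   "*&x"; folding it yields a plain reference to "x" instead.  */
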
/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in *n, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into
         referenced vars, unless they are referenced only from types
         or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call.  */
      if (EXPR_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

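/* A sketch of the bogosity elimination above (hypothetical code): if
   cloning maps the parameter "flag" to the constant 1, a copied
   self-assignment "flag = flag;" would degenerate into "1 = 1;"; the
   TREE_CONSTANT check replaces it with an empty statement instead.  */
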
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}

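/* For instance (region numbers invented for this example): if the
   inlined body calls __builtin_eh_pointer (2) and id->eh_map records
   that source region 2 was duplicated as region 7 in the caller, the
   argument is rewritten so the copied call reads
   __builtin_eh_pointer (7).  */
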
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}
1463 | ||
1464 | ||
e21aff8a SB |
1465 | /* Copy basic block, scale profile accordingly. Edges will be taken care of |
1466 | later. */ | |
1467 | ||
1468 | static basic_block | |
0178d644 VR |
1469 | copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, |
1470 | gcov_type count_scale) | |
e21aff8a | 1471 | { |
c2a4718a | 1472 | gimple_stmt_iterator gsi, copy_gsi, seq_gsi; |
e21aff8a | 1473 | basic_block copy_basic_block; |
726a989a | 1474 | tree decl; |
e21aff8a SB |
1475 | |
1476 | /* create_basic_block() will append every new block to | |
1477 | basic_block_info automatically. */ | |
cceb1885 GDR |
1478 | copy_basic_block = create_basic_block (NULL, (void *) 0, |
1479 | (basic_block) bb->prev_bb->aux); | |
e21aff8a | 1480 | copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE; |
45a80bb9 | 1481 | |
726a989a RB |
1482 | /* We are going to rebuild frequencies from scratch. These values |
1483 | have only minor importance for driving canonicalize_loop_headers. */ | |
45a80bb9 | 1484 | copy_basic_block->frequency = ((gcov_type)bb->frequency |
726a989a RB |
1485 | * frequency_scale / REG_BR_PROB_BASE); |
1486 | ||
45a80bb9 JH |
1487 | if (copy_basic_block->frequency > BB_FREQ_MAX) |
1488 | copy_basic_block->frequency = BB_FREQ_MAX; | |
e21aff8a | 1489 | |
726a989a RB |
1490 | copy_gsi = gsi_start_bb (copy_basic_block); |
1491 | ||
1492 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
e21aff8a | 1493 | { |
726a989a RB |
1494 | gimple stmt = gsi_stmt (gsi); |
1495 | gimple orig_stmt = stmt; | |
e21aff8a | 1496 | |
416c991f | 1497 | id->regimplify = false; |
726a989a RB |
1498 | stmt = remap_gimple_stmt (stmt, id); |
1499 | if (gimple_nop_p (stmt)) | |
1500 | continue; | |
1501 | ||
1502 | gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt); | |
c2a4718a | 1503 | seq_gsi = copy_gsi; |
726a989a RB |
1504 | |
1505 | /* With return slot optimization we can end up with | |
1506 | non-gimple (foo *)&this->m, fix that here. */ | |
4c29307d JJ |
1507 | if (is_gimple_assign (stmt) |
1508 | && gimple_assign_rhs_code (stmt) == NOP_EXPR | |
1509 | && !is_gimple_val (gimple_assign_rhs1 (stmt))) | |
e21aff8a | 1510 | { |
726a989a | 1511 | tree new_rhs; |
c2a4718a | 1512 | new_rhs = force_gimple_operand_gsi (&seq_gsi, |
4a2b7f24 JJ |
1513 | gimple_assign_rhs1 (stmt), |
1514 | true, NULL, false, GSI_NEW_STMT); | |
726a989a | 1515 | gimple_assign_set_rhs1 (stmt, new_rhs); |
c2a4718a | 1516 | id->regimplify = false; |
726a989a | 1517 | } |
2b65dae5 | 1518 | |
c2a4718a JJ |
1519 | gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT); |
1520 | ||
1521 | if (id->regimplify) | |
1522 | gimple_regimplify_operands (stmt, &seq_gsi); | |
1523 | ||
1524 | /* If copy_basic_block has been empty at the start of this iteration, | |
1525 | call gsi_start_bb again to get at the newly added statements. */ | |
1526 | if (gsi_end_p (copy_gsi)) | |
1527 | copy_gsi = gsi_start_bb (copy_basic_block); | |
1528 | else | |
1529 | gsi_next (&copy_gsi); | |
110cfe1c | 1530 | |
726a989a RB |
1531 | /* Process the new statement. The call to gimple_regimplify_operands |
1532 | possibly turned the statement into multiple statements, so we | |
1533 | need to process all of them. */ | |
c2a4718a | 1534 | do |
726a989a | 1535 | { |
9187e02d JH |
1536 | tree fn; |
1537 | ||
c2a4718a | 1538 | stmt = gsi_stmt (copy_gsi); |
726a989a RB |
1539 | if (is_gimple_call (stmt) |
1540 | && gimple_call_va_arg_pack_p (stmt) | |
1541 | && id->gimple_call) | |
1542 | { | |
1543 | /* __builtin_va_arg_pack () should be replaced by | |
1544 | all arguments corresponding to ... in the caller. */ | |
1545 | tree p; | |
1546 | gimple new_call; | |
1547 | VEC(tree, heap) *argarray; | |
1548 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1549 | size_t n; | |
1550 | ||
1551 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p)) | |
1552 | nargs--; | |
1553 | ||
1554 | /* Create the new array of arguments. */ | |
1555 | n = nargs + gimple_call_num_args (stmt); | |
1556 | argarray = VEC_alloc (tree, heap, n); | |
1557 | VEC_safe_grow (tree, heap, argarray, n); | |
1558 | ||
1559 | /* Copy all the arguments before '...' */ | |
1560 | memcpy (VEC_address (tree, argarray), | |
1561 | gimple_call_arg_ptr (stmt, 0), | |
1562 | gimple_call_num_args (stmt) * sizeof (tree)); | |
1563 | ||
1564 | /* Append the arguments passed in '...' */ | |
1565 | memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt), | |
1566 | gimple_call_arg_ptr (id->gimple_call, 0) | |
1567 | + (gimple_call_num_args (id->gimple_call) - nargs), | |
1568 | nargs * sizeof (tree)); | |
1569 | ||
1570 | new_call = gimple_build_call_vec (gimple_call_fn (stmt), | |
1571 | argarray); | |
1572 | ||
1573 | VEC_free (tree, heap, argarray); | |
1574 | ||
1575 | /* Copy all GIMPLE_CALL flags, location and block, except | |
1576 | GF_CALL_VA_ARG_PACK. */ | |
1577 | gimple_call_copy_flags (new_call, stmt); | |
1578 | gimple_call_set_va_arg_pack (new_call, false); | |
1579 | gimple_set_location (new_call, gimple_location (stmt)); | |
1580 | gimple_set_block (new_call, gimple_block (stmt)); | |
1581 | gimple_call_set_lhs (new_call, gimple_call_lhs (stmt)); | |
1582 | ||
1583 | gsi_replace (&copy_gsi, new_call, false); | |
9cfa22be | 1584 | gimple_set_bb (stmt, NULL); |
726a989a RB |
1585 | stmt = new_call; |
1586 | } | |
1587 | else if (is_gimple_call (stmt) | |
1588 | && id->gimple_call | |
1589 | && (decl = gimple_call_fndecl (stmt)) | |
1590 | && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL | |
1591 | && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN) | |
e0704a46 | 1592 | { |
726a989a RB |
1593 | /* __builtin_va_arg_pack_len () should be replaced by |
1594 | the number of anonymous arguments. */ | |
1595 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1596 | tree count, p; | |
1597 | gimple new_stmt; | |
1598 | ||
1599 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p)) | |
1600 | nargs--; | |
1601 | ||
1602 | count = build_int_cst (integer_type_node, nargs); | |
1603 | new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count); | |
1604 | gsi_replace (&copy_gsi, new_stmt, false); | |
1605 | stmt = new_stmt; | |
1606 | } | |
b8a00a4d | 1607 | |
726a989a RB |
1608 | /* Statements produced by inlining can be unfolded, especially |
1609 | when we constant propagated some operands. We can't fold | |
1610 | them right now for two reasons: | |
1611 | 1) folding requires SSA_NAME_DEF_STMTs to be correct | |
1612 | 2) we can't change function calls to builtins. | |
1613 | So we just mark the statement for later folding. We mark | |
1614 | all new statements, not just the statements changed by some | |
1615 | nontrivial substitution, so that even statements made | |
1616 | foldable indirectly are updated. If this turns out to be | |
1617 | expensive, copy_body can be told to watch for nontrivial | |
1618 | changes. */ | |
1619 | if (id->statements_to_fold) | |
1620 | pointer_set_insert (id->statements_to_fold, stmt); | |
1621 | ||
1622 | /* We're duplicating a CALL_EXPR. Find any corresponding | |
1623 | callgraph edges and update or duplicate them. */ | |
1624 | if (is_gimple_call (stmt)) | |
1625 | { | |
9b2a5ef7 | 1626 | struct cgraph_edge *edge; |
f618d33e | 1627 | int flags; |
6ef5231b | 1628 | |
726a989a | 1629 | switch (id->transform_call_graph_edges) |
e0704a46 | 1630 | { |
9b2a5ef7 RH |
1631 | case CB_CGE_DUPLICATE: |
1632 | edge = cgraph_edge (id->src_node, orig_stmt); | |
1633 | if (edge) | |
1634 | edge = cgraph_clone_edge (edge, id->dst_node, stmt, | |
d7f09764 | 1635 | gimple_uid (stmt), |
9b2a5ef7 RH |
1636 | REG_BR_PROB_BASE, 1, |
1637 | edge->frequency, true); | |
1638 | break; | |
1639 | ||
1640 | case CB_CGE_MOVE_CLONES: | |
1641 | cgraph_set_call_stmt_including_clones (id->dst_node, | |
1642 | orig_stmt, stmt); | |
1643 | edge = cgraph_edge (id->dst_node, stmt); | |
1644 | break; | |
1645 | ||
1646 | case CB_CGE_MOVE: | |
1647 | edge = cgraph_edge (id->dst_node, orig_stmt); | |
1648 | if (edge) | |
1649 | cgraph_set_call_stmt (edge, stmt); | |
1650 | break; | |
1651 | ||
1652 | default: | |
1653 | gcc_unreachable (); | |
110cfe1c | 1654 | } |
f618d33e | 1655 | |
9b2a5ef7 RH |
1656 | /* Constant propagation on arguments done during inlining | |
1657 | may create new direct calls. Produce an edge for them. */ | |
1658 | if ((!edge | |
1659 | || (edge->indirect_call | |
1660 | && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)) | |
1661 | && is_gimple_call (stmt) | |
1662 | && (fn = gimple_call_fndecl (stmt)) != NULL) | |
1663 | { | |
1664 | struct cgraph_node *dest = cgraph_node (fn); | |
1665 | ||
1666 | /* We have a missing edge in the callgraph. This can happen | |
1667 | when previous inlining turned an indirect call into a | |
1668 | direct call by constant propagating arguments. In all | |
1669 | other cases we hit a bug (incorrect node sharing is the | |
1670 | most common reason for missing edges). */ | |
1671 | gcc_assert (dest->needed || !dest->analyzed); | |
1672 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES) | |
1673 | cgraph_create_edge_including_clones | |
1674 | (id->dst_node, dest, stmt, bb->count, | |
1675 | compute_call_stmt_bb_frequency (id->dst_node->decl, bb), | |
1676 | bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL); | |
1677 | else | |
1678 | cgraph_create_edge (id->dst_node, dest, stmt, | |
1679 | bb->count, CGRAPH_FREQ_BASE, | |
1680 | bb->loop_depth)->inline_failed | |
1681 | = CIF_ORIGINALLY_INDIRECT_CALL; | |
1682 | if (dump_file) | |
1683 | { | |
1684 | fprintf (dump_file, "Created new direct edge to %s", | |
1685 | cgraph_node_name (dest)); | |
1686 | } | |
1687 | } | |
9187e02d | 1688 | |
f618d33e | 1689 | flags = gimple_call_flags (stmt); |
f618d33e MJ |
1690 | if (flags & ECF_MAY_BE_ALLOCA) |
1691 | cfun->calls_alloca = true; | |
1692 | if (flags & ECF_RETURNS_TWICE) | |
1693 | cfun->calls_setjmp = true; | |
726a989a | 1694 | } |
e21aff8a | 1695 | |
1d65f45c RH |
1696 | maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt, |
1697 | id->eh_map, id->eh_lp_nr); | |
726a989a | 1698 | |
b5b8b0ac | 1699 | if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt)) |
726a989a RB |
1700 | { |
1701 | ssa_op_iter i; | |
1702 | tree def; | |
1703 | ||
1704 | find_new_referenced_vars (gsi_stmt (copy_gsi)); | |
1705 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF) | |
1706 | if (TREE_CODE (def) == SSA_NAME) | |
1707 | SSA_NAME_DEF_STMT (def) = stmt; | |
1708 | } | |
1709 | ||
1710 | gsi_next (&copy_gsi); | |
e21aff8a | 1711 | } |
c2a4718a | 1712 | while (!gsi_end_p (copy_gsi)); |
726a989a RB |
1713 | |
1714 | copy_gsi = gsi_last_bb (copy_basic_block); | |
e21aff8a | 1715 | } |
726a989a | 1716 | |
e21aff8a SB |
1717 | return copy_basic_block; |
1718 | } | |
1719 | ||
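/* Illustrative sketch, not part of tree-inline.c: the user-level view of
   the two builtins copy_bb expands above.  Both are only valid inside
   always_inline variadic functions; the wrapper below is a hypothetical
   example in the style of the glibc fortify wrappers.  */

#include <stdio.h>

static inline int log_err (const char *fmt, ...)
  __attribute__ ((always_inline, format (printf, 1, 2)));

static inline int
log_err (const char *fmt, ...)
{
  /* Rewritten by the inliner into the number of anonymous arguments
     at the call site being inlined.  */
  if (__builtin_va_arg_pack_len () == 0)
    return fputs (fmt, stderr);
  /* Rewritten by the inliner into those arguments, forwarded verbatim;
     it must be the last argument of the call.  */
  return fprintf (stderr, fmt, __builtin_va_arg_pack ());
}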
110cfe1c JH |
1720 | /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA |
1721 | form is quite easy, since dominator relationship for old basic blocks does | |
1722 | not change. | |
1723 | ||
1724 | There is, however, an exception: inlining might change the dominator | |
1725 | relation across EH edges that go from basic blocks within the inlined | |
5305a4cb | 1726 | function to landing pads in the function we inline into. | |
110cfe1c | 1727 | |
e9705dc5 AO |
1728 | The function fills in PHI_RESULTs of such PHI nodes if they refer |
1729 | to gimple regs. Otherwise, the function marks PHI_RESULT of such | |
1730 | PHI nodes for renaming. For non-gimple regs, renaming is safe: the | |
1731 | EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be | |
1732 | set, and this means that there will be no overlapping live ranges | |
110cfe1c JH |
1733 | for the underlying symbol. |
1734 | ||
1735 | This might change in the future if we allow redirecting of EH edges | |
1736 | and we might then want to change the way the CFG is built pre-inlining | |
1737 | to include all the possible edges. */ | |
1738 | static void | |
e9705dc5 AO |
1739 | update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb, |
1740 | bool can_throw, bool nonlocal_goto) | |
110cfe1c JH |
1741 | { |
1742 | edge e; | |
1743 | edge_iterator ei; | |
1744 | ||
1745 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1746 | if (!e->dest->aux | |
1747 | || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK) | |
1748 | { | |
726a989a RB |
1749 | gimple phi; |
1750 | gimple_stmt_iterator si; | |
110cfe1c | 1751 | |
e9705dc5 AO |
1752 | if (!nonlocal_goto) |
1753 | gcc_assert (e->flags & EDGE_EH); | |
726a989a | 1754 | |
e9705dc5 AO |
1755 | if (!can_throw) |
1756 | gcc_assert (!(e->flags & EDGE_EH)); | |
726a989a RB |
1757 | |
1758 | for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si)) | |
110cfe1c | 1759 | { |
e9705dc5 AO |
1760 | edge re; |
1761 | ||
726a989a RB |
1762 | phi = gsi_stmt (si); |
1763 | ||
e9705dc5 AO |
1764 | /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */ |
1765 | gcc_assert (!e->dest->aux); | |
1766 | ||
496a4ef5 JH |
1767 | gcc_assert ((e->flags & EDGE_EH) |
1768 | || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi))); | |
e9705dc5 AO |
1769 | |
1770 | if (!is_gimple_reg (PHI_RESULT (phi))) | |
1771 | { | |
726a989a | 1772 | mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi))); |
e9705dc5 AO |
1773 | continue; |
1774 | } | |
1775 | ||
1776 | re = find_edge (ret_bb, e->dest); | |
1432b19f | 1777 | gcc_assert (re); |
e9705dc5 AO |
1778 | gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL)) |
1779 | == (e->flags & (EDGE_EH | EDGE_ABNORMAL))); | |
1780 | ||
1781 | SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), | |
1782 | USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re))); | |
110cfe1c JH |
1783 | } |
1784 | } | |
1785 | } | |
1786 | ||
726a989a | 1787 | |
128a79fb KH |
1788 | /* Copy edges from BB into its copy constructed earlier, scale profile | |
1789 | accordingly. Assume aux pointers point to the copies of | |
1790 | each BB. */ | |
726a989a | 1791 | |
e21aff8a | 1792 | static void |
0178d644 | 1793 | copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb) |
e21aff8a | 1794 | { |
cceb1885 | 1795 | basic_block new_bb = (basic_block) bb->aux; |
e21aff8a SB |
1796 | edge_iterator ei; |
1797 | edge old_edge; | |
726a989a | 1798 | gimple_stmt_iterator si; |
e21aff8a SB |
1799 | int flags; |
1800 | ||
1801 | /* Use the indices from the original blocks to create edges for the | |
1802 | new ones. */ | |
1803 | FOR_EACH_EDGE (old_edge, ei, bb->succs) | |
e0704a46 JH |
1804 | if (!(old_edge->flags & EDGE_EH)) |
1805 | { | |
82d6e6fc | 1806 | edge new_edge; |
e21aff8a | 1807 | |
e0704a46 | 1808 | flags = old_edge->flags; |
e21aff8a | 1809 | |
e0704a46 JH |
1810 | /* Return edges do get a FALLTHRU flag when they get inlined. */ | |
1811 | if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags | |
1812 | && old_edge->dest->aux != EXIT_BLOCK_PTR) | |
1813 | flags |= EDGE_FALLTHRU; | |
82d6e6fc KG |
1814 | new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags); |
1815 | new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE; | |
1816 | new_edge->probability = old_edge->probability; | |
e0704a46 | 1817 | } |
e21aff8a SB |
1818 | |
1819 | if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK) | |
1820 | return; | |
1821 | ||
726a989a | 1822 | for (si = gsi_start_bb (new_bb); !gsi_end_p (si);) |
e21aff8a | 1823 | { |
726a989a | 1824 | gimple copy_stmt; |
e9705dc5 | 1825 | bool can_throw, nonlocal_goto; |
e21aff8a | 1826 | |
726a989a | 1827 | copy_stmt = gsi_stmt (si); |
b5b8b0ac AO |
1828 | if (!is_gimple_debug (copy_stmt)) |
1829 | { | |
1830 | update_stmt (copy_stmt); | |
1831 | if (gimple_in_ssa_p (cfun)) | |
1832 | mark_symbols_for_renaming (copy_stmt); | |
1833 | } | |
726a989a | 1834 | |
e21aff8a | 1835 | /* Do this before the possible split_block. */ |
726a989a | 1836 | gsi_next (&si); |
e21aff8a SB |
1837 | |
1838 | /* If this tree could throw an exception, there are two | |
1839 | cases where we need to add abnormal edge(s): the | |
1840 | tree wasn't in a region and there is a "current | |
1841 | region" in the caller; or the original tree had | |
1842 | EH edges. In both cases split the block after the tree, | |
1843 | and add abnormal edge(s) as needed; we need both | |
1844 | those from the callee and the caller. | |
1845 | We check whether the copy can throw, because the const | |
1846 | propagation can change an INDIRECT_REF which throws | |
1847 | into a COMPONENT_REF which doesn't. If the copy | |
1848 | can throw, the original could also throw. */ | |
726a989a RB |
1849 | can_throw = stmt_can_throw_internal (copy_stmt); |
1850 | nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt); | |
e9705dc5 AO |
1851 | |
1852 | if (can_throw || nonlocal_goto) | |
e21aff8a | 1853 | { |
726a989a | 1854 | if (!gsi_end_p (si)) |
e21aff8a SB |
1855 | /* Note that bb's predecessor edges aren't necessarily |
1856 | right at this point; split_block doesn't care. */ | |
1857 | { | |
1858 | edge e = split_block (new_bb, copy_stmt); | |
110cfe1c | 1859 | |
e21aff8a | 1860 | new_bb = e->dest; |
110cfe1c | 1861 | new_bb->aux = e->src->aux; |
726a989a | 1862 | si = gsi_start_bb (new_bb); |
e21aff8a | 1863 | } |
e9705dc5 | 1864 | } |
e21aff8a | 1865 | |
1d65f45c RH |
1866 | if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH) |
1867 | make_eh_dispatch_edges (copy_stmt); | |
1868 | else if (can_throw) | |
e9705dc5 | 1869 | make_eh_edges (copy_stmt); |
110cfe1c | 1870 | |
e9705dc5 | 1871 | if (nonlocal_goto) |
726a989a | 1872 | make_abnormal_goto_edges (gimple_bb (copy_stmt), true); |
e9705dc5 AO |
1873 | |
1874 | if ((can_throw || nonlocal_goto) | |
1875 | && gimple_in_ssa_p (cfun)) | |
726a989a | 1876 | update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb, |
e9705dc5 | 1877 | can_throw, nonlocal_goto); |
110cfe1c JH |
1878 | } |
1879 | } | |
1880 | ||
1881 | /* Copy the PHIs. All blocks and edges are copied, some blocks | |
1882 | were possibly split and new outgoing EH edges inserted. | |
1883 | BB points to the block of the original function and AUX pointers link | |
1884 | the original and newly copied blocks. */ | |
1885 | ||
1886 | static void | |
1887 | copy_phis_for_bb (basic_block bb, copy_body_data *id) | |
1888 | { | |
3d9a9f94 | 1889 | basic_block const new_bb = (basic_block) bb->aux; |
110cfe1c | 1890 | edge_iterator ei; |
726a989a RB |
1891 | gimple phi; |
1892 | gimple_stmt_iterator si; | |
110cfe1c | 1893 | |
726a989a | 1894 | for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si)) |
110cfe1c | 1895 | { |
726a989a RB |
1896 | tree res, new_res; |
1897 | gimple new_phi; | |
110cfe1c JH |
1898 | edge new_edge; |
1899 | ||
726a989a RB |
1900 | phi = gsi_stmt (si); |
1901 | res = PHI_RESULT (phi); | |
1902 | new_res = res; | |
110cfe1c JH |
1903 | if (is_gimple_reg (res)) |
1904 | { | |
726a989a | 1905 | walk_tree (&new_res, copy_tree_body_r, id, NULL); |
110cfe1c JH |
1906 | SSA_NAME_DEF_STMT (new_res) |
1907 | = new_phi = create_phi_node (new_res, new_bb); | |
1908 | FOR_EACH_EDGE (new_edge, ei, new_bb->preds) | |
1909 | { | |
726a989a RB |
1910 | edge const old_edge |
1911 | = find_edge ((basic_block) new_edge->src->aux, bb); | |
110cfe1c JH |
1912 | tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge); |
1913 | tree new_arg = arg; | |
726a989a RB |
1914 | tree block = id->block; |
1915 | id->block = NULL_TREE; | |
1916 | walk_tree (&new_arg, copy_tree_body_r, id, NULL); | |
1917 | id->block = block; | |
110cfe1c | 1918 | gcc_assert (new_arg); |
36b6e793 JJ |
1919 | /* With return slot optimization we can end up with |
1920 | non-gimple (foo *)&this->m, fix that here. */ | |
1921 | if (TREE_CODE (new_arg) != SSA_NAME | |
1922 | && TREE_CODE (new_arg) != FUNCTION_DECL | |
1923 | && !is_gimple_val (new_arg)) | |
1924 | { | |
726a989a RB |
1925 | gimple_seq stmts = NULL; |
1926 | new_arg = force_gimple_operand (new_arg, &stmts, true, NULL); | |
1927 | gsi_insert_seq_on_edge_immediate (new_edge, stmts); | |
36b6e793 | 1928 | } |
f5045c96 AM |
1929 | add_phi_arg (new_phi, new_arg, new_edge, |
1930 | gimple_phi_arg_location_from_edge (phi, old_edge)); | |
110cfe1c | 1931 | } |
e21aff8a SB |
1932 | } |
1933 | } | |
1934 | } | |
1935 | ||
726a989a | 1936 | |
e21aff8a | 1937 | /* Wrapper for remap_decl so it can be used as a callback. */ |
726a989a | 1938 | |
e21aff8a SB |
1939 | static tree |
1940 | remap_decl_1 (tree decl, void *data) | |
1941 | { | |
1b369fae | 1942 | return remap_decl (decl, (copy_body_data *) data); |
e21aff8a SB |
1943 | } |
1944 | ||
110cfe1c JH |
1945 | /* Build the struct function and associated data structures for the new | |
1946 | clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */ | |
1947 | ||
1948 | static void | |
1949 | initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count, | |
1950 | int frequency) | |
1951 | { | |
110cfe1c | 1952 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
0178d644 | 1953 | gcov_type count_scale, frequency_scale; |
110cfe1c JH |
1954 | |
1955 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) | |
1956 | count_scale = (REG_BR_PROB_BASE * count | |
1957 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); | |
1958 | else | |
1959 | count_scale = 1; | |
1960 | ||
1961 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency) | |
1962 | frequency_scale = (REG_BR_PROB_BASE * frequency | |
1963 | / | |
1964 | ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency); | |
1965 | else | |
1966 | frequency_scale = count_scale; | |
1967 | ||
1968 | /* Register specific tree functions. */ | |
726a989a | 1969 | gimple_register_cfg_hooks (); |
39ecc018 JH |
1970 | |
1971 | /* Get clean struct function. */ | |
1972 | push_struct_function (new_fndecl); | |
1973 | ||
1974 | /* We will rebuild these, so just sanity check that they are empty. */ | |
1975 | gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL); | |
1976 | gcc_assert (cfun->local_decls == NULL); | |
1977 | gcc_assert (cfun->cfg == NULL); | |
1978 | gcc_assert (cfun->decl == new_fndecl); | |
1979 | ||
39ecc018 JH |
1980 | /* Copy items we preserve during cloning. */ | |
1981 | cfun->static_chain_decl = src_cfun->static_chain_decl; | |
1982 | cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area; | |
1983 | cfun->function_end_locus = src_cfun->function_end_locus; | |
1984 | cfun->curr_properties = src_cfun->curr_properties; | |
1985 | cfun->last_verified = src_cfun->last_verified; | |
1986 | if (src_cfun->ipa_transforms_to_apply) | |
1987 | cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap, | |
1988 | src_cfun->ipa_transforms_to_apply); | |
1989 | cfun->va_list_gpr_size = src_cfun->va_list_gpr_size; | |
1990 | cfun->va_list_fpr_size = src_cfun->va_list_fpr_size; | |
1991 | cfun->function_frequency = src_cfun->function_frequency; | |
1992 | cfun->has_nonlocal_label = src_cfun->has_nonlocal_label; | |
1993 | cfun->stdarg = src_cfun->stdarg; | |
1994 | cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p; | |
1995 | cfun->after_inlining = src_cfun->after_inlining; | |
1996 | cfun->returns_struct = src_cfun->returns_struct; | |
1997 | cfun->returns_pcc_struct = src_cfun->returns_pcc_struct; | |
1998 | cfun->after_tree_profile = src_cfun->after_tree_profile; | |
1999 | ||
110cfe1c JH |
2000 | init_empty_tree_cfg (); |
2001 | ||
2002 | ENTRY_BLOCK_PTR->count = | |
2003 | (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2004 | REG_BR_PROB_BASE); | |
2005 | ENTRY_BLOCK_PTR->frequency = | |
2006 | (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency * | |
2007 | frequency_scale / REG_BR_PROB_BASE); | |
2008 | EXIT_BLOCK_PTR->count = | |
2009 | (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2010 | REG_BR_PROB_BASE); | |
2011 | EXIT_BLOCK_PTR->frequency = | |
2012 | (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency * | |
2013 | frequency_scale / REG_BR_PROB_BASE); | |
2014 | if (src_cfun->eh) | |
2015 | init_eh_for_function (); | |
2016 | ||
2017 | if (src_cfun->gimple_df) | |
2018 | { | |
5db9ba0c | 2019 | init_tree_ssa (cfun); |
110cfe1c JH |
2020 | cfun->gimple_df->in_ssa_p = true; |
2021 | init_ssa_operands (); | |
2022 | } | |
2023 | pop_cfun (); | |
2024 | } | |
2025 | ||
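/* Standalone sketch of the fixed-point profile scaling used above and in
   copy_bb (hypothetical numbers; REG_BR_PROB_BASE is 10000).  A scale
   factor is computed once from the call-site count and the callee's
   entry count, then applied to every copied block.  */

#define REG_BR_PROB_BASE 10000
typedef long long gcov_type;

static gcov_type
scaled_count (gcov_type bb_count, gcov_type call_count,
	      gcov_type callee_entry_count)
{
  /* count_scale as computed in initialize_cfun/copy_cfg_body.  */
  gcov_type count_scale = REG_BR_PROB_BASE * call_count
			  / callee_entry_count;
  /* As applied per block in copy_bb.  E.g. a block executed 300 times
     in a callee entered 500 times, inlined at a call site executed 125
     times, gets count 300 * 2500 / 10000 = 75.  */
  return bb_count * count_scale / REG_BR_PROB_BASE;
}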
e21aff8a SB |
2026 | /* Make a copy of the body of FN so that it can be inserted inline in |
2027 | another function. Walks FN via CFG, returns new fndecl. */ | |
2028 | ||
2029 | static tree | |
1b369fae | 2030 | copy_cfg_body (copy_body_data * id, gcov_type count, int frequency, |
e21aff8a SB |
2031 | basic_block entry_block_map, basic_block exit_block_map) |
2032 | { | |
1b369fae | 2033 | tree callee_fndecl = id->src_fn; |
e21aff8a | 2034 | /* Original cfun for the callee, doesn't change. */ |
1b369fae | 2035 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
110cfe1c | 2036 | struct function *cfun_to_copy; |
e21aff8a SB |
2037 | basic_block bb; |
2038 | tree new_fndecl = NULL; | |
0178d644 | 2039 | gcov_type count_scale, frequency_scale; |
110cfe1c | 2040 | int last; |
e21aff8a | 2041 | |
1b369fae | 2042 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) |
e21aff8a | 2043 | count_scale = (REG_BR_PROB_BASE * count |
1b369fae | 2044 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); |
e21aff8a SB |
2045 | else |
2046 | count_scale = 1; | |
2047 | ||
1b369fae | 2048 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency) |
e21aff8a SB |
2049 | frequency_scale = (REG_BR_PROB_BASE * frequency |
2050 | / | |
1b369fae | 2051 | ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency); |
e21aff8a SB |
2052 | else |
2053 | frequency_scale = count_scale; | |
2054 | ||
2055 | /* Register specific tree functions. */ | |
726a989a | 2056 | gimple_register_cfg_hooks (); |
e21aff8a SB |
2057 | |
2058 | /* Must have a CFG here at this point. */ | |
2059 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION | |
2060 | (DECL_STRUCT_FUNCTION (callee_fndecl))); | |
2061 | ||
110cfe1c JH |
2062 | cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
2063 | ||
e21aff8a SB |
2064 | ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map; |
2065 | EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map; | |
110cfe1c JH |
2066 | entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); |
2067 | exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); | |
e21aff8a | 2068 | |
e21aff8a SB |
2069 | /* Duplicate any exception-handling regions. */ |
2070 | if (cfun->eh) | |
1d65f45c RH |
2071 | id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr, |
2072 | remap_decl_1, id); | |
726a989a | 2073 | |
e21aff8a SB |
2074 | /* Use aux pointers to map the original blocks to copy. */ |
2075 | FOR_EACH_BB_FN (bb, cfun_to_copy) | |
110cfe1c | 2076 | { |
82d6e6fc KG |
2077 | basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale); |
2078 | bb->aux = new_bb; | |
2079 | new_bb->aux = bb; | |
110cfe1c JH |
2080 | } |
2081 | ||
7c57be85 | 2082 | last = last_basic_block; |
726a989a | 2083 | |
e21aff8a SB |
2084 | /* Now that we've duplicated the blocks, duplicate their edges. */ |
2085 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
e9705dc5 | 2086 | copy_edges_for_bb (bb, count_scale, exit_block_map); |
726a989a | 2087 | |
110cfe1c JH |
2088 | if (gimple_in_ssa_p (cfun)) |
2089 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
2090 | copy_phis_for_bb (bb, id); | |
726a989a | 2091 | |
e21aff8a | 2092 | FOR_ALL_BB_FN (bb, cfun_to_copy) |
110cfe1c JH |
2093 | { |
2094 | ((basic_block)bb->aux)->aux = NULL; | |
2095 | bb->aux = NULL; | |
2096 | } | |
726a989a | 2097 | |
110cfe1c JH |
2098 | /* Zero out AUX fields of newly created blocks during EH edge | |
2099 | insertion. */ | |
7c57be85 | 2100 | for (; last < last_basic_block; last++) |
110cfe1c JH |
2101 | BASIC_BLOCK (last)->aux = NULL; |
2102 | entry_block_map->aux = NULL; | |
2103 | exit_block_map->aux = NULL; | |
e21aff8a | 2104 | |
1d65f45c RH |
2105 | if (id->eh_map) |
2106 | { | |
2107 | pointer_map_destroy (id->eh_map); | |
2108 | id->eh_map = NULL; | |
2109 | } | |
2110 | ||
e21aff8a SB |
2111 | return new_fndecl; |
2112 | } | |
2113 | ||
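/* The aux-pointer technique used by copy_cfg_body, in miniature (a
   generic sketch, not the GCC API): originals and copies point at each
   other through a spare per-block field, giving an O(1) mapping without
   a hash table, and the fields are nulled before returning so no stale
   pointers survive.  */

struct blk { struct blk *aux; };

static void
map_and_unmap (struct blk **orig, struct blk **copy, int n)
{
  int i;
  for (i = 0; i < n; i++)
    {
      orig[i]->aux = copy[i];	/* original -> copy, used when rewiring edges */
      copy[i]->aux = orig[i];	/* copy -> original */
    }
  /* ... edges and PHIs would be rewired through the mapping here ... */
  for (i = 0; i < n; i++)
    {
      copy[i]->aux = 0;
      orig[i]->aux = 0;
    }
}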
b5b8b0ac AO |
2114 | /* Copy the debug STMT using ID. We deal with these statements in a |
2115 | special way: if any variable in their VALUE expression wasn't | |
2116 | remapped yet, we won't remap it, because that would get decl uids | |
2117 | out of sync, causing codegen differences between -g and -g0. If | |
2118 | this arises, we drop the VALUE expression altogether. */ | |
2119 | ||
2120 | static void | |
2121 | copy_debug_stmt (gimple stmt, copy_body_data *id) | |
2122 | { | |
2123 | tree t, *n; | |
2124 | struct walk_stmt_info wi; | |
2125 | ||
2126 | t = id->block; | |
2127 | if (gimple_block (stmt)) | |
2128 | { | |
2129 | tree *n; | |
2130 | n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt)); | |
2131 | if (n) | |
2132 | t = *n; | |
2133 | } | |
2134 | gimple_set_block (stmt, t); | |
2135 | ||
2136 | /* Remap all the operands in COPY. */ | |
2137 | memset (&wi, 0, sizeof (wi)); | |
2138 | wi.info = id; | |
2139 | ||
2140 | processing_debug_stmt = 1; | |
2141 | ||
2142 | t = gimple_debug_bind_get_var (stmt); | |
2143 | ||
2144 | if (TREE_CODE (t) == PARM_DECL && id->debug_map | |
2145 | && (n = (tree *) pointer_map_contains (id->debug_map, t))) | |
2146 | { | |
2147 | gcc_assert (TREE_CODE (*n) == VAR_DECL); | |
2148 | t = *n; | |
2149 | } | |
2150 | else | |
2151 | walk_tree (&t, remap_gimple_op_r, &wi, NULL); | |
2152 | ||
2153 | gimple_debug_bind_set_var (stmt, t); | |
2154 | ||
2155 | if (gimple_debug_bind_has_value_p (stmt)) | |
2156 | walk_tree (gimple_debug_bind_get_value_ptr (stmt), | |
2157 | remap_gimple_op_r, &wi, NULL); | |
2158 | ||
2159 | /* Punt if any decl couldn't be remapped. */ | |
2160 | if (processing_debug_stmt < 0) | |
2161 | gimple_debug_bind_reset_value (stmt); | |
2162 | ||
2163 | processing_debug_stmt = 0; | |
2164 | ||
2165 | update_stmt (stmt); | |
2166 | if (gimple_in_ssa_p (cfun)) | |
2167 | mark_symbols_for_renaming (stmt); | |
2168 | } | |
2169 | ||
2170 | /* Process deferred debug stmts. In order to give values better odds | |
2171 | of being successfully remapped, we delay the processing of debug | |
2172 | stmts until all other stmts that might require remapping are | |
2173 | processed. */ | |
2174 | ||
2175 | static void | |
2176 | copy_debug_stmts (copy_body_data *id) | |
2177 | { | |
2178 | size_t i; | |
2179 | gimple stmt; | |
2180 | ||
2181 | if (!id->debug_stmts) | |
2182 | return; | |
2183 | ||
2184 | for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++) | |
2185 | copy_debug_stmt (stmt, id); | |
2186 | ||
2187 | VEC_free (gimple, heap, id->debug_stmts); | |
2188 | } | |
2189 | ||
f82a627c EB |
2190 | /* Make a copy of the body of SRC_FN so that it can be inserted inline in |
2191 | another function. */ | |
2192 | ||
2193 | static tree | |
2194 | copy_tree_body (copy_body_data *id) | |
2195 | { | |
2196 | tree fndecl = id->src_fn; | |
2197 | tree body = DECL_SAVED_TREE (fndecl); | |
2198 | ||
2199 | walk_tree (&body, copy_tree_body_r, id, NULL); | |
2200 | ||
2201 | return body; | |
2202 | } | |
2203 | ||
b5b8b0ac AO |
2204 | /* Make a copy of the body of FN so that it can be inserted inline in |
2205 | another function. */ | |
2206 | ||
e21aff8a | 2207 | static tree |
1b369fae | 2208 | copy_body (copy_body_data *id, gcov_type count, int frequency, |
e21aff8a SB |
2209 | basic_block entry_block_map, basic_block exit_block_map) |
2210 | { | |
1b369fae | 2211 | tree fndecl = id->src_fn; |
e21aff8a SB |
2212 | tree body; |
2213 | ||
2214 | /* If this body has a CFG, walk CFG and copy. */ | |
2215 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl))); | |
2216 | body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map); | |
b5b8b0ac | 2217 | copy_debug_stmts (id); |
e21aff8a SB |
2218 | |
2219 | return body; | |
2220 | } | |
2221 | ||
04482133 AO |
2222 | /* Return true if VALUE is an ADDR_EXPR of an automatic variable |
2223 | defined in function FN, or of a data member thereof. */ | |
2224 | ||
2225 | static bool | |
2226 | self_inlining_addr_expr (tree value, tree fn) | |
2227 | { | |
2228 | tree var; | |
2229 | ||
2230 | if (TREE_CODE (value) != ADDR_EXPR) | |
2231 | return false; | |
2232 | ||
2233 | var = get_base_address (TREE_OPERAND (value, 0)); | |
e21aff8a | 2234 | |
50886bf1 | 2235 | return var && auto_var_in_fn_p (var, fn); |
04482133 AO |
2236 | } |
2237 | ||
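/* Hypothetical example of the hazard this predicate guards against.
   If the recursive call below is inlined into f itself, parameter P is
   bound to &i, the address of an automatic variable of the very
   function being duplicated; setup_one_parameter must not propagate
   such an ADDR_EXPR as if it were an ordinary invariant, since `i'
   itself gets remapped when the body is copied.  */

static inline int
f (const int *p)
{
  int i = 42;
  if (p)
    return *p;
  return f (&i);	/* self-inlining: &i is not a safe "constant" */
}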
b5b8b0ac AO |
2238 | /* Append to BB a debug annotation that binds VAR to VALUE, inheriting |
2239 | lexical block and line number information from BASE_STMT, if given, | |
2240 | or from the last stmt of the block otherwise. */ | |
2241 | ||
2242 | static gimple | |
2243 | insert_init_debug_bind (copy_body_data *id, | |
2244 | basic_block bb, tree var, tree value, | |
2245 | gimple base_stmt) | |
2246 | { | |
2247 | gimple note; | |
2248 | gimple_stmt_iterator gsi; | |
2249 | tree tracked_var; | |
2250 | ||
2251 | if (!gimple_in_ssa_p (id->src_cfun)) | |
2252 | return NULL; | |
2253 | ||
2254 | if (!MAY_HAVE_DEBUG_STMTS) | |
2255 | return NULL; | |
2256 | ||
2257 | tracked_var = target_for_debug_bind (var); | |
2258 | if (!tracked_var) | |
2259 | return NULL; | |
2260 | ||
2261 | if (bb) | |
2262 | { | |
2263 | gsi = gsi_last_bb (bb); | |
2264 | if (!base_stmt && !gsi_end_p (gsi)) | |
2265 | base_stmt = gsi_stmt (gsi); | |
2266 | } | |
2267 | ||
2268 | note = gimple_build_debug_bind (tracked_var, value, base_stmt); | |
2269 | ||
2270 | if (bb) | |
2271 | { | |
2272 | if (!gsi_end_p (gsi)) | |
2273 | gsi_insert_after (&gsi, note, GSI_SAME_STMT); | |
2274 | else | |
2275 | gsi_insert_before (&gsi, note, GSI_SAME_STMT); | |
2276 | } | |
2277 | ||
2278 | return note; | |
2279 | } | |
2280 | ||
6de9cd9a | 2281 | static void |
b5b8b0ac | 2282 | insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt) |
0f1961a2 | 2283 | { |
0f1961a2 JH |
2284 | /* If VAR represents a zero-sized variable, it's possible that the |
2285 | assignment statement may result in no gimple statements. */ | |
2286 | if (init_stmt) | |
c2a4718a JJ |
2287 | { |
2288 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
0f1961a2 | 2289 | |
bfb0b886 RG |
2290 | /* We can end up with init statements that store to a non-register |
2291 | from a rhs with a conversion. Handle that here by forcing the | |
2292 | rhs into a temporary. gimple_regimplify_operands is not | |
2293 | prepared to do this for us. */ | |
b5b8b0ac AO |
2294 | if (!is_gimple_debug (init_stmt) |
2295 | && !is_gimple_reg (gimple_assign_lhs (init_stmt)) | |
bfb0b886 RG |
2296 | && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt))) |
2297 | && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS) | |
2298 | { | |
2299 | tree rhs = build1 (gimple_assign_rhs_code (init_stmt), | |
2300 | gimple_expr_type (init_stmt), | |
2301 | gimple_assign_rhs1 (init_stmt)); | |
2302 | rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false, | |
2303 | GSI_NEW_STMT); | |
2304 | gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs)); | |
2305 | gimple_assign_set_rhs1 (init_stmt, rhs); | |
2306 | } | |
c2a4718a JJ |
2307 | gsi_insert_after (&si, init_stmt, GSI_NEW_STMT); |
2308 | gimple_regimplify_operands (init_stmt, &si); | |
2309 | mark_symbols_for_renaming (init_stmt); | |
b5b8b0ac AO |
2310 | |
2311 | if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS) | |
2312 | { | |
2313 | tree var, def = gimple_assign_lhs (init_stmt); | |
2314 | ||
2315 | if (TREE_CODE (def) == SSA_NAME) | |
2316 | var = SSA_NAME_VAR (def); | |
2317 | else | |
2318 | var = def; | |
2319 | ||
2320 | insert_init_debug_bind (id, bb, var, def, init_stmt); | |
2321 | } | |
c2a4718a | 2322 | } |
0f1961a2 JH |
2323 | } |
2324 | ||
2325 | /* Initialize parameter P with VALUE. If needed, produce init statement | |
2326 | at the end of BB. When BB is NULL, we return init statement to be | |
2327 | output later. */ | |
2328 | static gimple | |
1b369fae | 2329 | setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn, |
e21aff8a | 2330 | basic_block bb, tree *vars) |
6de9cd9a | 2331 | { |
0f1961a2 | 2332 | gimple init_stmt = NULL; |
6de9cd9a | 2333 | tree var; |
f4088621 | 2334 | tree rhs = value; |
110cfe1c JH |
2335 | tree def = (gimple_in_ssa_p (cfun) |
2336 | ? gimple_default_def (id->src_cfun, p) : NULL); | |
6de9cd9a | 2337 | |
f4088621 RG |
2338 | if (value |
2339 | && value != error_mark_node | |
2340 | && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))) | |
c54e3854 RG |
2341 | { |
2342 | if (fold_convertible_p (TREE_TYPE (p), value)) | |
2343 | rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value); | |
2344 | else | |
2345 | /* ??? For valid (GIMPLE) programs we should not end up here. | |
2346 | Still if something has gone wrong and we end up with truly | |
2347 | mismatched types here, fall back to using a VIEW_CONVERT_EXPR | |
2348 | to not leak invalid GIMPLE to the following passes. */ | |
2349 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value); | |
2350 | } | |
f4088621 | 2351 | |
b5b8b0ac AO |
2352 | /* Make an equivalent VAR_DECL. Note that we must NOT remap the type |
2353 | here since the type of this decl must be visible to the calling | |
2354 | function. */ | |
2355 | var = copy_decl_to_var (p, id); | |
2356 | ||
2357 | /* We're actually using the newly-created var. */ | |
2358 | if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL) | |
2359 | { | |
2360 | get_var_ann (var); | |
2361 | add_referenced_var (var); | |
2362 | } | |
2363 | ||
2364 | /* Declare this new variable. */ | |
2365 | TREE_CHAIN (var) = *vars; | |
2366 | *vars = var; | |
2367 | ||
2368 | /* Make gimplifier happy about this variable. */ | |
2369 | DECL_SEEN_IN_BIND_EXPR_P (var) = 1; | |
2370 | ||
110cfe1c | 2371 | /* If the parameter is never assigned to and has no SSA_NAMEs created, | |
b5b8b0ac AO |
2372 | we would not need to create a new variable here at all, if it |
2373 | weren't for debug info. Still, we can just use the argument | |
2374 | value. */ | |
6de9cd9a DN |
2375 | if (TREE_READONLY (p) |
2376 | && !TREE_ADDRESSABLE (p) | |
110cfe1c JH |
2377 | && value && !TREE_SIDE_EFFECTS (value) |
2378 | && !def) | |
6de9cd9a | 2379 | { |
84936f6f RH |
2380 | /* We may produce non-gimple trees by adding NOPs or introduce |
2381 | invalid sharing when the operand is not really constant. | |
2382 | It is no big deal to prohibit constant propagation here as | |
2383 | we will constant propagate in the DOM1 pass anyway. */ | |
2384 | if (is_gimple_min_invariant (value) | |
f4088621 RG |
2385 | && useless_type_conversion_p (TREE_TYPE (p), |
2386 | TREE_TYPE (value)) | |
04482133 AO |
2387 | /* We have to be very careful about ADDR_EXPR. Make sure |
2388 | the base variable isn't a local variable of the inlined | |
2389 | function, e.g., when doing recursive inlining, direct or | |
2390 | mutually-recursive or whatever, which is why we don't | |
2391 | just test whether fn == current_function_decl. */ | |
2392 | && ! self_inlining_addr_expr (value, fn)) | |
6de9cd9a | 2393 | { |
6de9cd9a | 2394 | insert_decl_map (id, p, value); |
b5b8b0ac AO |
2395 | insert_debug_decl_map (id, p, var); |
2396 | return insert_init_debug_bind (id, bb, var, value, NULL); | |
6de9cd9a DN |
2397 | } |
2398 | } | |
2399 | ||
6de9cd9a DN |
2400 | /* Register the VAR_DECL as the equivalent for the PARM_DECL; |
2401 | that way, when the PARM_DECL is encountered, it will be | |
2402 | automatically replaced by the VAR_DECL. */ | |
7c7d3047 | 2403 | insert_decl_map (id, p, var); |
6de9cd9a | 2404 | |
6de9cd9a DN |
2405 | /* Even if P was TREE_READONLY, the new VAR should not be. |
2406 | In the original code, we would have constructed a | |
2407 | temporary, and then the function body would have never | |
2408 | changed the value of P. However, now, we will be | |
2409 | constructing VAR directly. The constructor body may | |
2410 | change its value multiple times as it is being | |
2411 | constructed. Therefore, it must not be TREE_READONLY; | |
2412 | the back-end assumes that a TREE_READONLY variable is | |
2413 | assigned to only once. */ | |
2414 | if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p))) | |
2415 | TREE_READONLY (var) = 0; | |
2416 | ||
110cfe1c JH |
2417 | /* If there is no setup required and we are in SSA, take the easy route |
2418 | replacing all SSA names representing the function parameter by the | |
2419 | SSA name passed to the function. | |
2420 | ||
2421 | We need to construct a map for the variable anyway, as it might be | |
2422 | used in different SSA names when the parameter is set in the function. | |
2423 | ||
8454d27e JH |
2424 | Do replacement at -O0 for const arguments replaced by constant. |
2425 | This is important for builtin_constant_p and other constructs requiring | |
b5b8b0ac | 2426 | the constant argument to be visible in the inlined function body. */ | |
110cfe1c | 2427 | if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p) |
8454d27e JH |
2428 | && (optimize |
2429 | || (TREE_READONLY (p) | |
2430 | && is_gimple_min_invariant (rhs))) | |
110cfe1c | 2431 | && (TREE_CODE (rhs) == SSA_NAME |
9b718f81 JH |
2432 | || is_gimple_min_invariant (rhs)) |
2433 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)) | |
110cfe1c JH |
2434 | { |
2435 | insert_decl_map (id, def, rhs); | |
b5b8b0ac | 2436 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
110cfe1c JH |
2437 | } |
2438 | ||
f6f2da7d JH |
2439 | /* If the value of the argument is never used, there is no need to | |
2440 | initialize it. */ | |
1cf5abb3 | 2441 | if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p)) |
f6f2da7d JH |
2442 | { |
2443 | gcc_assert (!value || !TREE_SIDE_EFFECTS (value)); | |
b5b8b0ac | 2444 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
f6f2da7d JH |
2445 | } |
2446 | ||
6de9cd9a DN |
2447 | /* Initialize this VAR_DECL from the equivalent argument. Convert |
2448 | the argument to the proper type in case it was promoted. */ | |
2449 | if (value) | |
2450 | { | |
6de9cd9a | 2451 | if (rhs == error_mark_node) |
110cfe1c | 2452 | { |
7c7d3047 | 2453 | insert_decl_map (id, p, var); |
b5b8b0ac | 2454 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
110cfe1c | 2455 | } |
afe08db5 | 2456 | |
73dab33b | 2457 | STRIP_USELESS_TYPE_CONVERSION (rhs); |
6de9cd9a | 2458 | |
726a989a | 2459 | /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we |
6de9cd9a | 2460 | keep our trees in gimple form. */ |
110cfe1c JH |
2461 | if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p)) |
2462 | { | |
2463 | def = remap_ssa_name (def, id); | |
726a989a | 2464 | init_stmt = gimple_build_assign (def, rhs); |
110cfe1c JH |
2465 | SSA_NAME_IS_DEFAULT_DEF (def) = 0; |
2466 | set_default_def (var, NULL); | |
2467 | } | |
2468 | else | |
726a989a | 2469 | init_stmt = gimple_build_assign (var, rhs); |
6de9cd9a | 2470 | |
0f1961a2 | 2471 | if (bb && init_stmt) |
b5b8b0ac | 2472 | insert_init_stmt (id, bb, init_stmt); |
6de9cd9a | 2473 | } |
0f1961a2 | 2474 | return init_stmt; |
6de9cd9a DN |
2475 | } |
2476 | ||
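/* Hypothetical user-level illustration of the -O0 replacement path
   mentioned above: a readonly parameter bound to an invariant is mapped
   straight to the value rather than to a freshly initialized VAR_DECL,
   so the constant stays visible inside the inlined body.  */

static inline int is_const_arg (const int x)
  __attribute__ ((always_inline));

static inline int
is_const_arg (const int x)
{
  return __builtin_constant_p (x);	/* can see the caller's constant */
}

static int
const_arg_caller (void)
{
  return is_const_arg (3);	/* X's default def is mapped to 3 */
}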
d4e4baa9 | 2477 | /* Generate code to initialize the parameters of the function at the |
726a989a | 2478 | top of the stack in ID from the GIMPLE_CALL STMT. */ |
d4e4baa9 | 2479 | |
e21aff8a | 2480 | static void |
726a989a | 2481 | initialize_inlined_parameters (copy_body_data *id, gimple stmt, |
e21aff8a | 2482 | tree fn, basic_block bb) |
d4e4baa9 | 2483 | { |
d4e4baa9 | 2484 | tree parms; |
726a989a | 2485 | size_t i; |
d4e4baa9 | 2486 | tree p; |
d436bff8 | 2487 | tree vars = NULL_TREE; |
726a989a | 2488 | tree static_chain = gimple_call_chain (stmt); |
d4e4baa9 AO |
2489 | |
2490 | /* Figure out what the parameters are. */ | |
18c6ada9 | 2491 | parms = DECL_ARGUMENTS (fn); |
d4e4baa9 | 2492 | |
d4e4baa9 AO |
2493 | /* Loop through the parameter declarations, replacing each with an |
2494 | equivalent VAR_DECL, appropriately initialized. */ | |
726a989a RB |
2495 | for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++) |
2496 | { | |
2497 | tree val; | |
2498 | val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL; | |
2499 | setup_one_parameter (id, p, val, fn, bb, &vars); | |
2500 | } | |
4838c5ee | 2501 | |
6de9cd9a DN |
2502 | /* Initialize the static chain. */ |
2503 | p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl; | |
ea99e0be | 2504 | gcc_assert (fn != current_function_decl); |
6de9cd9a DN |
2505 | if (p) |
2506 | { | |
2507 | /* No static chain? Seems like a bug in tree-nested.c. */ | |
1e128c5f | 2508 | gcc_assert (static_chain); |
4838c5ee | 2509 | |
e21aff8a | 2510 | setup_one_parameter (id, p, static_chain, fn, bb, &vars); |
4838c5ee AO |
2511 | } |
2512 | ||
e21aff8a | 2513 | declare_inline_vars (id->block, vars); |
d4e4baa9 AO |
2514 | } |
2515 | ||
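/* The static chain set up above is how GNU C nested functions reach
   their enclosing frame; a minimal hypothetical callee that has one:  */

static int
outer (int x)
{
  int inner (void) { return x + 1; }	/* X is reached via the static chain */
  return inner ();	/* inlining INNER must initialize its chain parameter */
}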
726a989a | 2516 | |
e21aff8a SB |
2517 | /* Declare a return variable to replace the RESULT_DECL for the |
2518 | function we are calling. An appropriate DECL_STMT is returned. | |
2519 | The USE_STMT is filled to contain a use of the declaration to | |
2520 | indicate the return value of the function. | |
2521 | ||
110cfe1c JH |
2522 | RETURN_SLOT, if non-null, is the place where the result is stored. It | |
2523 | is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null, | |
726a989a | 2524 | was the LHS of the MODIFY_EXPR to which this call is the RHS. |
7740f00d RH |
2525 | |
2526 | The return value is a (possibly null) value that is the result of the | |
2527 | function as seen by the callee. *USE_P is a (possibly null) value that | |
2528 | holds the result as seen by the caller. */ | |
d4e4baa9 | 2529 | |
d436bff8 | 2530 | static tree |
110cfe1c JH |
2531 | declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest, |
2532 | tree *use_p) | |
d4e4baa9 | 2533 | { |
1b369fae RH |
2534 | tree callee = id->src_fn; |
2535 | tree caller = id->dst_fn; | |
7740f00d RH |
2536 | tree result = DECL_RESULT (callee); |
2537 | tree callee_type = TREE_TYPE (result); | |
2538 | tree caller_type = TREE_TYPE (TREE_TYPE (callee)); | |
2539 | tree var, use; | |
d4e4baa9 AO |
2540 | |
2541 | /* We don't need to do anything for functions that don't return | |
2542 | anything. */ | |
7740f00d | 2543 | if (!result || VOID_TYPE_P (callee_type)) |
d4e4baa9 | 2544 | { |
6de9cd9a | 2545 | *use_p = NULL_TREE; |
d4e4baa9 AO |
2546 | return NULL_TREE; |
2547 | } | |
2548 | ||
cc77ae10 | 2549 | /* If there was a return slot, then the return value is the |
7740f00d | 2550 | dereferenced address of that object. */ |
110cfe1c | 2551 | if (return_slot) |
7740f00d | 2552 | { |
110cfe1c | 2553 | /* The front end shouldn't have used both return_slot and |
7740f00d | 2554 | a modify expression. */ |
1e128c5f | 2555 | gcc_assert (!modify_dest); |
cc77ae10 | 2556 | if (DECL_BY_REFERENCE (result)) |
110cfe1c JH |
2557 | { |
2558 | tree return_slot_addr = build_fold_addr_expr (return_slot); | |
2559 | STRIP_USELESS_TYPE_CONVERSION (return_slot_addr); | |
2560 | ||
2561 | /* We are going to construct *&return_slot and we can't do that | |
2562 | for variables believed to be not addressable. | |
2563 | ||
2564 | FIXME: This check can possibly match, because values returned | |
2565 | via return slot optimization are not believed to have their | |
2566 | address taken by alias analysis. */ | |
2567 | gcc_assert (TREE_CODE (return_slot) != SSA_NAME); | |
2568 | if (gimple_in_ssa_p (cfun)) | |
2569 | { | |
2570 | HOST_WIDE_INT bitsize; | |
2571 | HOST_WIDE_INT bitpos; | |
2572 | tree offset; | |
2573 | enum machine_mode mode; | |
2574 | int unsignedp; | |
2575 | int volatilep; | |
2576 | tree base; | |
2577 | base = get_inner_reference (return_slot, &bitsize, &bitpos, | |
2578 | &offset, | |
2579 | &mode, &unsignedp, &volatilep, | |
2580 | false); | |
2581 | if (TREE_CODE (base) == INDIRECT_REF) | |
2582 | base = TREE_OPERAND (base, 0); | |
2583 | if (TREE_CODE (base) == SSA_NAME) | |
2584 | base = SSA_NAME_VAR (base); | |
2585 | mark_sym_for_renaming (base); | |
2586 | } | |
2587 | var = return_slot_addr; | |
2588 | } | |
cc77ae10 | 2589 | else |
110cfe1c JH |
2590 | { |
2591 | var = return_slot; | |
2592 | gcc_assert (TREE_CODE (var) != SSA_NAME); | |
b5ca517c | 2593 | TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result); |
110cfe1c | 2594 | } |
0890b981 AP |
2595 | if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
2596 | || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
2597 | && !DECL_GIMPLE_REG_P (result) | |
22918034 | 2598 | && DECL_P (var)) |
0890b981 | 2599 | DECL_GIMPLE_REG_P (var) = 0; |
7740f00d RH |
2600 | use = NULL; |
2601 | goto done; | |
2602 | } | |
2603 | ||
2604 | /* All types requiring non-trivial constructors should have been handled. */ | |
1e128c5f | 2605 | gcc_assert (!TREE_ADDRESSABLE (callee_type)); |
7740f00d RH |
2606 | |
2607 | /* Attempt to avoid creating a new temporary variable. */ | |
110cfe1c JH |
2608 | if (modify_dest |
2609 | && TREE_CODE (modify_dest) != SSA_NAME) | |
7740f00d RH |
2610 | { |
2611 | bool use_it = false; | |
2612 | ||
2613 | /* We can't use MODIFY_DEST if there's type promotion involved. */ | |
f4088621 | 2614 | if (!useless_type_conversion_p (callee_type, caller_type)) |
7740f00d RH |
2615 | use_it = false; |
2616 | ||
2617 | /* ??? If we're assigning to a variable sized type, then we must | |
2618 | reuse the destination variable, because we've no good way to | |
2619 | create variable sized temporaries at this point. */ | |
2620 | else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST) | |
2621 | use_it = true; | |
2622 | ||
2623 | /* If the callee cannot possibly modify MODIFY_DEST, then we can | |
2624 | reuse it as the result of the call directly. Don't do this if | |
2625 | it would promote MODIFY_DEST to addressable. */ | |
e2f9fe42 RH |
2626 | else if (TREE_ADDRESSABLE (result)) |
2627 | use_it = false; | |
2628 | else | |
2629 | { | |
2630 | tree base_m = get_base_address (modify_dest); | |
2631 | ||
2632 | /* If the base isn't a decl, then it's a pointer, and we don't | |
2633 | know where that's going to go. */ | |
2634 | if (!DECL_P (base_m)) | |
2635 | use_it = false; | |
2636 | else if (is_global_var (base_m)) | |
2637 | use_it = false; | |
0890b981 AP |
2638 | else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
2639 | || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
2640 | && !DECL_GIMPLE_REG_P (result) | |
2641 | && DECL_GIMPLE_REG_P (base_m)) | |
1d327c16 | 2642 | use_it = false; |
e2f9fe42 RH |
2643 | else if (!TREE_ADDRESSABLE (base_m)) |
2644 | use_it = true; | |
2645 | } | |
7740f00d RH |
2646 | |
2647 | if (use_it) | |
2648 | { | |
2649 | var = modify_dest; | |
2650 | use = NULL; | |
2651 | goto done; | |
2652 | } | |
2653 | } | |
2654 | ||
1e128c5f | 2655 | gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST); |
7740f00d | 2656 | |
c08cd4c1 | 2657 | var = copy_result_decl_to_var (result, id); |
110cfe1c JH |
2658 | if (gimple_in_ssa_p (cfun)) |
2659 | { | |
2660 | get_var_ann (var); | |
2661 | add_referenced_var (var); | |
2662 | } | |
e21aff8a | 2663 | |
7740f00d | 2664 | DECL_SEEN_IN_BIND_EXPR_P (var) = 1; |
cb91fab0 | 2665 | DECL_STRUCT_FUNCTION (caller)->local_decls |
7740f00d | 2666 | = tree_cons (NULL_TREE, var, |
cb91fab0 | 2667 | DECL_STRUCT_FUNCTION (caller)->local_decls); |
7740f00d | 2668 | |
6de9cd9a | 2669 | /* Do not have the rest of GCC warn about this variable as it should |
471854f8 | 2670 | not be visible to the user. */ |
6de9cd9a | 2671 | TREE_NO_WARNING (var) = 1; |
d4e4baa9 | 2672 | |
c08cd4c1 JM |
2673 | declare_inline_vars (id->block, var); |
2674 | ||
7740f00d RH |
2675 | /* Build the use expr. If the return type of the function was |
2676 | promoted, convert it back to the expected type. */ | |
2677 | use = var; | |
f4088621 | 2678 | if (!useless_type_conversion_p (caller_type, TREE_TYPE (var))) |
7740f00d | 2679 | use = fold_convert (caller_type, var); |
73dab33b AP |
2680 | |
2681 | STRIP_USELESS_TYPE_CONVERSION (use); | |
7740f00d | 2682 | |
c08cd4c1 | 2683 | if (DECL_BY_REFERENCE (result)) |
32848948 RG |
2684 | { |
2685 | TREE_ADDRESSABLE (var) = 1; | |
2686 | var = build_fold_addr_expr (var); | |
2687 | } | |
c08cd4c1 | 2688 | |
7740f00d | 2689 | done: |
d4e4baa9 AO |
2690 | /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that |
2691 | way, when the RESULT_DECL is encountered, it will be | |
2692 | automatically replaced by the VAR_DECL. */ | |
5e20bdd7 | 2693 | insert_decl_map (id, result, var); |
d4e4baa9 | 2694 | |
6de9cd9a DN |
2695 | /* Remember this so we can ignore it in remap_decls. */ |
2696 | id->retvar = var; | |
2697 | ||
7740f00d RH |
2698 | *use_p = use; |
2699 | return var; | |
d4e4baa9 AO |
2700 | } |
2701 | ||
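/* Hypothetical illustration of the cases handled above.  For the
   aggregate call, `s' itself can serve as the RETURN_SLOT, so the
   callee's RESULT_DECL is mapped onto `s' and no temporary copy is
   made; for the scalar call, `t' is the MODIFY_DEST and is reused
   directly unless type promotion or addressability prevents it.  */

struct S { int a[8]; };

static inline struct S
mk (void)
{
  struct S r = { { 1 } };
  return r;
}

static inline int
one (void)
{
  return 1;
}

static int
use_results (void)
{
  struct S s = mk ();	/* return-slot case */
  int t = one ();	/* modify-dest case */
  return s.a[0] + t;
}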
27dbd3ac RH |
2702 | /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference |
2703 | to a local label. */ | |
4838c5ee | 2704 | |
27dbd3ac RH |
2705 | static tree |
2706 | has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp) | |
4838c5ee | 2707 | { |
27dbd3ac RH |
2708 | tree node = *nodep; |
2709 | tree fn = (tree) fnp; | |
726a989a | 2710 | |
27dbd3ac RH |
2711 | if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn) |
2712 | return node; | |
2713 | ||
2714 | if (TYPE_P (node)) | |
2715 | *walk_subtrees = 0; | |
2716 | ||
2717 | return NULL_TREE; | |
2718 | } | |
726a989a | 2719 | |
27dbd3ac RH |
2720 | /* Callback through walk_tree. Determine if we've got an aggregate |
2721 | type that we can't support; return non-null if so. */ | |
726a989a RB |
2722 | |
2723 | static tree | |
27dbd3ac RH |
2724 | cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED, |
2725 | void *data ATTRIBUTE_UNUSED) | |
726a989a | 2726 | { |
27dbd3ac | 2727 | tree t, node = *nodep; |
726a989a RB |
2728 | |
2729 | if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE) | |
2730 | { | |
2731 | /* We cannot inline a function of the form | |
2732 | ||
2733 | void F (int i) { struct S { int ar[i]; } s; } | |
2734 | ||
2735 | Attempting to do so produces a catch-22. | |
2736 | If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/ | |
2737 | UNION_TYPE nodes, then it goes into infinite recursion on a | |
2738 | structure containing a pointer to its own type. If it doesn't, | |
2739 | then the type node for S doesn't get adjusted properly when | |
2740 | F is inlined. | |
2741 | ||
2742 | ??? This is likely no longer true, but it's too late in the 4.0 | |
2743 | cycle to try to find out. This should be checked for 4.1. */ | |
2744 | for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t)) | |
2745 | if (variably_modified_type_p (TREE_TYPE (t), NULL)) | |
27dbd3ac | 2746 | return node; |
726a989a RB |
2747 | } |
2748 | ||
2749 | return NULL_TREE; | |
4838c5ee AO |
2750 | } |
2751 | ||
726a989a | 2752 | |
27dbd3ac RH |
2753 | /* Determine if the function can be copied. If so return NULL. If |
2754 | not return a string describing the reason for failure. */ |
2755 | ||
2756 | static const char * | |
2757 | copy_forbidden (struct function *fun, tree fndecl) | |
2758 | { | |
2759 | const char *reason = fun->cannot_be_copied_reason; | |
2760 | tree step; | |
2761 | ||
2762 | /* Only examine the function once. */ | |
2763 | if (fun->cannot_be_copied_set) | |
2764 | return reason; | |
2765 | ||
2766 | /* We cannot copy a function that receives a non-local goto | |
2767 | because we cannot remap the destination label used in the | |
2768 | function that is performing the non-local goto. */ | |
2769 | /* ??? Actually, this should be possible, if we work at it. | |
2770 | No doubt there's just a handful of places that simply | |
2771 | assume it doesn't happen and don't substitute properly. */ | |
2772 | if (fun->has_nonlocal_label) | |
2773 | { | |
2774 | reason = G_("function %q+F can never be copied " | |
2775 | "because it receives a non-local goto"); | |
2776 | goto fail; | |
2777 | } | |
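/* A minimal sketch of the rejected shape (hypothetical user code,
   using the GNU nested-function extension):

       void outer (void)
       {
         __label__ out;
         void inner (void) { goto out; }
         inner ();
        out:;
       }

   OUTER receives the non-local goto from INNER, so a copy of OUTER
   could not remap the destination label.  */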
2778 | ||
2779 | for (step = fun->local_decls; step; step = TREE_CHAIN (step)) | |
2780 | { | |
2781 | tree decl = TREE_VALUE (step); | |
2782 | ||
2783 | if (TREE_CODE (decl) == VAR_DECL | |
2784 | && TREE_STATIC (decl) | |
2785 | && !DECL_EXTERNAL (decl) | |
2786 | && DECL_INITIAL (decl) | |
2787 | && walk_tree_without_duplicates (&DECL_INITIAL (decl), | |
2788 | has_label_address_in_static_1, | |
2789 | fndecl)) | |
2790 | { | |
2791 | reason = G_("function %q+F can never be copied because it saves " | |
2792 | "address of local label in a static variable"); | |
2793 | goto fail; | |
2794 | } | |
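/* A minimal sketch of this case (hypothetical user code, using the
   GNU labels-as-values extension):

       void f (void **out)
       {
         static void *tab[] = { &&lab };
        lab:
         *out = tab[0];
       }

   The DECL_INITIAL of TAB records the address of the local label
   LAB, which a copy of F could not remap.  */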
2795 | ||
2796 | if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl) | |
2797 | && variably_modified_type_p (TREE_TYPE (decl), NULL) | |
2798 | && walk_tree_without_duplicates (&TREE_TYPE (decl), | |
2799 | cannot_copy_type_1, NULL)) | |
2800 | { | |
2801 | reason = G_("function %q+F can never be copied " | |
2802 | "because it uses variable sized variables"); | |
2803 | goto fail; | |
2804 | } | |
2805 | } | |
2806 | ||
2807 | fail: | |
2808 | fun->cannot_be_copied_reason = reason; | |
2809 | fun->cannot_be_copied_set = true; | |
2810 | return reason; | |
2811 | } | |
2812 | ||
2813 | ||
2814 | static const char *inline_forbidden_reason; | |
2815 | ||
2816 | /* A callback for walk_gimple_seq to handle statements. Returns non-null | |
2817 | iff a function cannot be inlined. Also sets the reason why. */ |
c986baf6 | 2818 | |
c986baf6 | 2819 | static tree |
726a989a RB |
2820 | inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p, |
2821 | struct walk_stmt_info *wip) | |
c986baf6 | 2822 | { |
726a989a | 2823 | tree fn = (tree) wip->info; |
f08545a8 | 2824 | tree t; |
726a989a | 2825 | gimple stmt = gsi_stmt (*gsi); |
c986baf6 | 2826 | |
726a989a | 2827 | switch (gimple_code (stmt)) |
f08545a8 | 2828 | { |
726a989a | 2829 | case GIMPLE_CALL: |
3197c4fd AS |
2830 | /* Refuse to inline an alloca call unless the user explicitly forced |
2831 | it, as this may change the program's memory overhead drastically |
2832 | when the function using alloca is called in a loop. In the copy of |
2833 | GCC included in SPEC2000, inlining into schedule_block caused it to |
2834 | require 2GB of RAM instead of 256MB. */ |
726a989a | 2835 | if (gimple_alloca_call_p (stmt) |
f08545a8 JH |
2836 | && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))) |
2837 | { | |
ddd2d57e | 2838 | inline_forbidden_reason |
dee15844 | 2839 | = G_("function %q+F can never be inlined because it uses " |
ddd2d57e | 2840 | "alloca (override using the always_inline attribute)"); |
726a989a RB |
2841 | *handled_ops_p = true; |
2842 | return fn; | |
f08545a8 | 2843 | } |
726a989a RB |
2844 | |
2845 | t = gimple_call_fndecl (stmt); | |
2846 | if (t == NULL_TREE) | |
f08545a8 | 2847 | break; |
84f5e1b1 | 2848 | |
f08545a8 JH |
2849 | /* We cannot inline functions that call setjmp. */ |
2850 | if (setjmp_call_p (t)) | |
2851 | { | |
ddd2d57e | 2852 | inline_forbidden_reason |
dee15844 | 2853 | = G_("function %q+F can never be inlined because it uses setjmp"); |
726a989a RB |
2854 | *handled_ops_p = true; |
2855 | return t; | |
f08545a8 JH |
2856 | } |
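/* E.g. (hypothetical user code), any function of this shape is
   rejected:

       #include <setjmp.h>
       extern jmp_buf env;
       void checkpoint (void)
       {
         if (setjmp (env))
           return;
       }

   Inlined, the setjmp would capture the caller's frame instead of
   CHECKPOINT's, changing where a later longjmp resumes.  */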
2857 | ||
6de9cd9a | 2858 | if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL) |
3197c4fd | 2859 | switch (DECL_FUNCTION_CODE (t)) |
f08545a8 | 2860 | { |
3197c4fd AS |
2861 | /* We cannot inline functions that take a variable number of |
2862 | arguments. */ | |
2863 | case BUILT_IN_VA_START: | |
3197c4fd AS |
2864 | case BUILT_IN_NEXT_ARG: |
2865 | case BUILT_IN_VA_END: | |
6de9cd9a | 2866 | inline_forbidden_reason |
dee15844 | 2867 | = G_("function %q+F can never be inlined because it " |
6de9cd9a | 2868 | "uses variable argument lists"); |
726a989a RB |
2869 | *handled_ops_p = true; |
2870 | return t; | |
6de9cd9a | 2871 | |
3197c4fd | 2872 | case BUILT_IN_LONGJMP: |
6de9cd9a DN |
2873 | /* We can't inline functions that call __builtin_longjmp at |
2874 | all. The non-local goto machinery really requires the | |
2875 | destination be in a different function. If we allow the | |
2876 | function calling __builtin_longjmp to be inlined into the | |
2877 | function calling __builtin_setjmp, Things will Go Awry. */ | |
2878 | inline_forbidden_reason | |
dee15844 | 2879 | = G_("function %q+F can never be inlined because " |
6de9cd9a | 2880 | "it uses setjmp-longjmp exception handling"); |
726a989a RB |
2881 | *handled_ops_p = true; |
2882 | return t; | |
6de9cd9a DN |
2883 | |
2884 | case BUILT_IN_NONLOCAL_GOTO: | |
2885 | /* Similarly. */ | |
2886 | inline_forbidden_reason | |
dee15844 | 2887 | = G_("function %q+F can never be inlined because " |
6de9cd9a | 2888 | "it uses non-local goto"); |
726a989a RB |
2889 | *handled_ops_p = true; |
2890 | return t; | |
f08545a8 | 2891 | |
4b284111 JJ |
2892 | case BUILT_IN_RETURN: |
2893 | case BUILT_IN_APPLY_ARGS: | |
2894 | /* If a __builtin_apply_args caller would be inlined, | |
2895 | it would be saving arguments of the function it has | |
2896 | been inlined into. Similarly, __builtin_return would |
2897 | return from the function it has been inlined into. */ |
2898 | inline_forbidden_reason | |
dee15844 | 2899 | = G_("function %q+F can never be inlined because " |
4b284111 | 2900 | "it uses __builtin_return or __builtin_apply_args"); |
726a989a RB |
2901 | *handled_ops_p = true; |
2902 | return t; | |
4b284111 | 2903 | |
3197c4fd AS |
2904 | default: |
2905 | break; | |
2906 | } | |
f08545a8 JH |
2907 | break; |
2908 | ||
726a989a RB |
2909 | case GIMPLE_GOTO: |
2910 | t = gimple_goto_dest (stmt); | |
f08545a8 JH |
2911 | |
2912 | /* We will not inline a function that uses a computed goto. The |
2913 | addresses of its local labels, which may be tucked into | |
2914 | global storage, are of course not constant across | |
2915 | instantiations, which causes unexpected behavior. */ | |
2916 | if (TREE_CODE (t) != LABEL_DECL) | |
2917 | { | |
ddd2d57e | 2918 | inline_forbidden_reason |
dee15844 | 2919 | = G_("function %q+F can never be inlined " |
ddd2d57e | 2920 | "because it contains a computed goto"); |
726a989a RB |
2921 | *handled_ops_p = true; |
2922 | return t; | |
f08545a8 | 2923 | } |
6de9cd9a | 2924 | break; |
f08545a8 | 2925 | |
f08545a8 JH |
2926 | default: |
2927 | break; | |
2928 | } | |
2929 | ||
726a989a | 2930 | *handled_ops_p = false; |
f08545a8 | 2931 | return NULL_TREE; |
84f5e1b1 RH |
2932 | } |
2933 | ||
726a989a RB |
2934 | /* Return true if FNDECL is a function that cannot be inlined into |
2935 | another one. */ | |
2936 | ||
2937 | static bool | |
f08545a8 | 2938 | inline_forbidden_p (tree fndecl) |
84f5e1b1 | 2939 | { |
2092ee7d | 2940 | struct function *fun = DECL_STRUCT_FUNCTION (fndecl); |
726a989a RB |
2941 | struct walk_stmt_info wi; |
2942 | struct pointer_set_t *visited_nodes; | |
2943 | basic_block bb; | |
2944 | bool forbidden_p = false; | |
2945 | ||
27dbd3ac RH |
2946 | /* First check for shared reasons not to copy the code. */ |
2947 | inline_forbidden_reason = copy_forbidden (fun, fndecl); | |
2948 | if (inline_forbidden_reason != NULL) | |
2949 | return true; | |
2950 | ||
2951 | /* Next, walk the statements of the function looking for | |
2952 | constructs we can't handle, or that are non-optimal for inlining. */ |
726a989a RB |
2953 | visited_nodes = pointer_set_create (); |
2954 | memset (&wi, 0, sizeof (wi)); | |
2955 | wi.info = (void *) fndecl; | |
2956 | wi.pset = visited_nodes; | |
e21aff8a | 2957 | |
2092ee7d | 2958 | FOR_EACH_BB_FN (bb, fun) |
726a989a RB |
2959 | { |
2960 | gimple ret; | |
2961 | gimple_seq seq = bb_seq (bb); | |
27dbd3ac | 2962 | ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi); |
726a989a RB |
2963 | forbidden_p = (ret != NULL); |
2964 | if (forbidden_p) | |
27dbd3ac | 2965 | break; |
2092ee7d JJ |
2966 | } |
2967 | ||
726a989a | 2968 | pointer_set_destroy (visited_nodes); |
726a989a | 2969 | return forbidden_p; |
84f5e1b1 RH |
2970 | } |
2971 | ||
b3c3af2f SB |
2972 | /* Returns nonzero if FN is a function that does not have any |
2973 | fundamental inline-blocking properties. */ |
d4e4baa9 | 2974 | |
27dbd3ac RH |
2975 | bool |
2976 | tree_inlinable_function_p (tree fn) | |
d4e4baa9 | 2977 | { |
b3c3af2f | 2978 | bool inlinable = true; |
18177c7e RG |
2979 | bool do_warning; |
2980 | tree always_inline; | |
d4e4baa9 AO |
2981 | |
2982 | /* If we've already decided this function shouldn't be inlined, | |
2983 | there's no need to check again. */ | |
2984 | if (DECL_UNINLINABLE (fn)) | |
b3c3af2f | 2985 | return false; |
d4e4baa9 | 2986 | |
18177c7e RG |
2987 | /* We only warn for functions declared `inline' by the user. */ |
2988 | do_warning = (warn_inline | |
18177c7e | 2989 | && DECL_DECLARED_INLINE_P (fn) |
0494626a | 2990 | && !DECL_NO_INLINE_WARNING_P (fn) |
18177c7e RG |
2991 | && !DECL_IN_SYSTEM_HEADER (fn)); |
2992 | ||
2993 | always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)); | |
2994 | ||
e90acd93 | 2995 | if (flag_no_inline |
18177c7e RG |
2996 | && always_inline == NULL) |
2997 | { | |
2998 | if (do_warning) | |
2999 | warning (OPT_Winline, "function %q+F can never be inlined because it " | |
3000 | "is suppressed using -fno-inline", fn); | |
3001 | inlinable = false; | |
3002 | } | |
3003 | ||
3004 | /* Don't auto-inline anything that might not be bound within | |
3005 | this unit of translation. */ | |
3006 | else if (!DECL_DECLARED_INLINE_P (fn) | |
3007 | && DECL_REPLACEABLE_P (fn)) | |
3008 | inlinable = false; | |
3009 | ||
3010 | else if (!function_attribute_inlinable_p (fn)) | |
3011 | { | |
3012 | if (do_warning) | |
3013 | warning (OPT_Winline, "function %q+F can never be inlined because it " | |
3014 | "uses attributes conflicting with inlining", fn); | |
3015 | inlinable = false; | |
3016 | } | |
46c5ad27 | 3017 | |
f08545a8 | 3018 | else if (inline_forbidden_p (fn)) |
b3c3af2f SB |
3019 | { |
3020 | /* See if we should warn about uninlinable functions. Previously, | |
3021 | some of these warnings would be issued while trying to expand | |
3022 | the function inline, but that would cause multiple warnings | |
3023 | about functions that would for example call alloca. But since | |
3024 | this a property of the function, just one warning is enough. | |
3025 | As a bonus we can now give more details about the reason why a | |
18177c7e RG |
3026 | function is not inlinable. */ |
3027 | if (always_inline) | |
dee15844 | 3028 | sorry (inline_forbidden_reason, fn); |
2d327012 | 3029 | else if (do_warning) |
d2fcbf6f | 3030 | warning (OPT_Winline, inline_forbidden_reason, fn); |
b3c3af2f SB |
3031 | |
3032 | inlinable = false; | |
3033 | } | |
d4e4baa9 AO |
3034 | |
3035 | /* Squirrel away the result so that we don't have to check again. */ | |
b3c3af2f | 3036 | DECL_UNINLINABLE (fn) = !inlinable; |
d4e4baa9 | 3037 | |
b3c3af2f SB |
3038 | return inlinable; |
3039 | } | |
3040 | ||
e5c4f28a RG |
3041 | /* Estimate the cost of a memory move. Use the machine-dependent |
3042 | word size and take a possible memcpy call into account. */ |
3043 | ||
3044 | int | |
3045 | estimate_move_cost (tree type) | |
3046 | { | |
3047 | HOST_WIDE_INT size; | |
3048 | ||
078c3644 JH |
3049 | gcc_assert (!VOID_TYPE_P (type)); |
3050 | ||
e5c4f28a RG |
3051 | size = int_size_in_bytes (type); |
3052 | ||
e04ad03d | 3053 | if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size)) |
e5c4f28a RG |
3054 | /* Cost of a memcpy call, 3 arguments and the call. */ |
3055 | return 4; | |
3056 | else | |
3057 | return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES); | |
3058 | } | |
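/* A worked example (all numbers are target-dependent; assume
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (!optimize_size) == 4, giving a
   32-byte threshold): a 24-byte struct costs (24 + 8 - 1) / 8 == 3
   units, while a 40-byte struct exceeds the threshold and is charged
   the flat memcpy cost of 4.  */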
3059 | ||
726a989a | 3060 | /* Returns the cost of operation CODE, according to WEIGHTS. */ |
7f9bc51b | 3061 | |
726a989a | 3062 | static int |
02f0b13a JH |
3063 | estimate_operator_cost (enum tree_code code, eni_weights *weights, |
3064 | tree op1 ATTRIBUTE_UNUSED, tree op2) | |
6de9cd9a | 3065 | { |
726a989a | 3066 | switch (code) |
6de9cd9a | 3067 | { |
726a989a RB |
3068 | /* These are "free" conversions, or their presumed cost |
3069 | is folded into other operations. */ | |
61fcaeec | 3070 | case RANGE_EXPR: |
1a87cf0c | 3071 | CASE_CONVERT: |
726a989a RB |
3072 | case COMPLEX_EXPR: |
3073 | case PAREN_EXPR: | |
726a989a | 3074 | return 0; |
6de9cd9a | 3075 | |
e5c4f28a RG |
3076 | /* Assign cost of 1 to usual operations. |
3077 | ??? We may consider mapping RTL costs to this. */ | |
6de9cd9a | 3078 | case COND_EXPR: |
4151978d | 3079 | case VEC_COND_EXPR: |
6de9cd9a DN |
3080 | |
3081 | case PLUS_EXPR: | |
5be014d5 | 3082 | case POINTER_PLUS_EXPR: |
6de9cd9a DN |
3083 | case MINUS_EXPR: |
3084 | case MULT_EXPR: | |
3085 | ||
09e881c9 | 3086 | case ADDR_SPACE_CONVERT_EXPR: |
325217ed | 3087 | case FIXED_CONVERT_EXPR: |
6de9cd9a | 3088 | case FIX_TRUNC_EXPR: |
6de9cd9a DN |
3089 | |
3090 | case NEGATE_EXPR: | |
3091 | case FLOAT_EXPR: | |
3092 | case MIN_EXPR: | |
3093 | case MAX_EXPR: | |
3094 | case ABS_EXPR: | |
3095 | ||
3096 | case LSHIFT_EXPR: | |
3097 | case RSHIFT_EXPR: | |
3098 | case LROTATE_EXPR: | |
3099 | case RROTATE_EXPR: | |
a6b46ba2 DN |
3100 | case VEC_LSHIFT_EXPR: |
3101 | case VEC_RSHIFT_EXPR: | |
6de9cd9a DN |
3102 | |
3103 | case BIT_IOR_EXPR: | |
3104 | case BIT_XOR_EXPR: | |
3105 | case BIT_AND_EXPR: | |
3106 | case BIT_NOT_EXPR: | |
3107 | ||
3108 | case TRUTH_ANDIF_EXPR: | |
3109 | case TRUTH_ORIF_EXPR: | |
3110 | case TRUTH_AND_EXPR: | |
3111 | case TRUTH_OR_EXPR: | |
3112 | case TRUTH_XOR_EXPR: | |
3113 | case TRUTH_NOT_EXPR: | |
3114 | ||
3115 | case LT_EXPR: | |
3116 | case LE_EXPR: | |
3117 | case GT_EXPR: | |
3118 | case GE_EXPR: | |
3119 | case EQ_EXPR: | |
3120 | case NE_EXPR: | |
3121 | case ORDERED_EXPR: | |
3122 | case UNORDERED_EXPR: | |
3123 | ||
3124 | case UNLT_EXPR: | |
3125 | case UNLE_EXPR: | |
3126 | case UNGT_EXPR: | |
3127 | case UNGE_EXPR: | |
3128 | case UNEQ_EXPR: | |
d1a7edaf | 3129 | case LTGT_EXPR: |
6de9cd9a | 3130 | |
6de9cd9a DN |
3131 | case CONJ_EXPR: |
3132 | ||
3133 | case PREDECREMENT_EXPR: | |
3134 | case PREINCREMENT_EXPR: | |
3135 | case POSTDECREMENT_EXPR: | |
3136 | case POSTINCREMENT_EXPR: | |
3137 | ||
16630a2c DN |
3138 | case REALIGN_LOAD_EXPR: |
3139 | ||
61d3cdbb DN |
3140 | case REDUC_MAX_EXPR: |
3141 | case REDUC_MIN_EXPR: | |
3142 | case REDUC_PLUS_EXPR: | |
20f06221 | 3143 | case WIDEN_SUM_EXPR: |
726a989a RB |
3144 | case WIDEN_MULT_EXPR: |
3145 | case DOT_PROD_EXPR: | |
3146 | ||
89d67cca DN |
3147 | case VEC_WIDEN_MULT_HI_EXPR: |
3148 | case VEC_WIDEN_MULT_LO_EXPR: | |
3149 | case VEC_UNPACK_HI_EXPR: | |
3150 | case VEC_UNPACK_LO_EXPR: | |
d9987fb4 UB |
3151 | case VEC_UNPACK_FLOAT_HI_EXPR: |
3152 | case VEC_UNPACK_FLOAT_LO_EXPR: | |
8115817b | 3153 | case VEC_PACK_TRUNC_EXPR: |
89d67cca | 3154 | case VEC_PACK_SAT_EXPR: |
d9987fb4 | 3155 | case VEC_PACK_FIX_TRUNC_EXPR: |
98b44b0e IR |
3156 | case VEC_EXTRACT_EVEN_EXPR: |
3157 | case VEC_EXTRACT_ODD_EXPR: | |
3158 | case VEC_INTERLEAVE_HIGH_EXPR: | |
3159 | case VEC_INTERLEAVE_LOW_EXPR: | |
3160 | ||
726a989a | 3161 | return 1; |
6de9cd9a | 3162 | |
1ea7e6ad | 3163 | /* A few special cases of expensive operations. This is useful |
6de9cd9a DN |
3164 | to avoid inlining functions that have too many of these. */ |
3165 | case TRUNC_DIV_EXPR: | |
3166 | case CEIL_DIV_EXPR: | |
3167 | case FLOOR_DIV_EXPR: | |
3168 | case ROUND_DIV_EXPR: | |
3169 | case EXACT_DIV_EXPR: | |
3170 | case TRUNC_MOD_EXPR: | |
3171 | case CEIL_MOD_EXPR: | |
3172 | case FLOOR_MOD_EXPR: | |
3173 | case ROUND_MOD_EXPR: | |
3174 | case RDIV_EXPR: | |
02f0b13a JH |
3175 | if (TREE_CODE (op2) != INTEGER_CST) |
3176 | return weights->div_mod_cost; | |
3177 | return 1; | |
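/* E.g. 'x / 8' is charged 1, since division by a constant is
   typically strength-reduced to shifts and multiplies, while 'x / y'
   is charged WEIGHTS->div_mod_cost.  */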
726a989a RB |
3178 | |
3179 | default: | |
3180 | /* We expect a copy assignment with no operator. */ | |
3181 | gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS); | |
3182 | return 0; | |
3183 | } | |
3184 | } | |
3185 | ||
3186 | ||
3187 | /* Estimate number of instructions that will be created by expanding | |
3188 | the statements in the statement sequence STMTS. | |
3189 | WEIGHTS contains weights attributed to various constructs. */ | |
3190 | ||
3191 | static | |
3192 | int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights) | |
3193 | { | |
3194 | int cost; | |
3195 | gimple_stmt_iterator gsi; | |
3196 | ||
3197 | cost = 0; | |
3198 | for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3199 | cost += estimate_num_insns (gsi_stmt (gsi), weights); | |
3200 | ||
3201 | return cost; | |
3202 | } | |
3203 | ||
3204 | ||
3205 | /* Estimate number of instructions that will be created by expanding STMT. | |
3206 | WEIGHTS contains weights attributed to various constructs. */ | |
3207 | ||
3208 | int | |
3209 | estimate_num_insns (gimple stmt, eni_weights *weights) | |
3210 | { | |
3211 | unsigned cost, i; | |
3212 | enum gimple_code code = gimple_code (stmt); | |
3213 | tree lhs; | |
02f0b13a | 3214 | tree rhs; |
726a989a RB |
3215 | |
3216 | switch (code) | |
3217 | { | |
3218 | case GIMPLE_ASSIGN: | |
3219 | /* Try to estimate the cost of assignments. We have two cases to |
3220 | deal with: | |
3221 | 1) Simple assignments to registers; | |
3222 | 2) Stores to things that must live in memory. This includes | |
3223 | "normal" stores to scalars, but also assignments of large | |
3224 | structures, or constructors of big arrays; | |
3225 | ||
3226 | Let us look at these two cases, assuming we have "a = b + C": |
3227 | <GIMPLE_ASSIGN <var_decl "a"> | |
3228 | <plus_expr <var_decl "b"> <constant C>> | |
3229 | If "a" is a GIMPLE register, the assignment to it is free on almost | |
3230 | any target, because "a" usually ends up in a real register. Hence | |
3231 | the only cost of this expression comes from the PLUS_EXPR, and we | |
3232 | can ignore the GIMPLE_ASSIGN. | |
3233 | If "a" is not a GIMPLE register, the assignment to "a" will most | |
3234 | likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost | |
3235 | of moving something into "a", which we compute using the function | |
3236 | estimate_move_cost. */ | |
3237 | lhs = gimple_assign_lhs (stmt); | |
02f0b13a JH |
3238 | rhs = gimple_assign_rhs1 (stmt); |
3239 | ||
726a989a RB |
3240 | if (is_gimple_reg (lhs)) |
3241 | cost = 0; | |
3242 | else | |
3243 | cost = estimate_move_cost (TREE_TYPE (lhs)); | |
3244 | ||
02f0b13a JH |
3245 | if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs)) |
3246 | cost += estimate_move_cost (TREE_TYPE (rhs)); | |
3247 | ||
3248 | cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights, | |
3249 | gimple_assign_rhs1 (stmt), | |
3250 | get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) | |
3251 | == GIMPLE_BINARY_RHS | |
3252 | ? gimple_assign_rhs2 (stmt) : NULL); | |
726a989a RB |
3253 | break; |
3254 | ||
3255 | case GIMPLE_COND: | |
02f0b13a JH |
3256 | cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights, |
3257 | gimple_op (stmt, 0), | |
3258 | gimple_op (stmt, 1)); | |
726a989a RB |
3259 | break; |
3260 | ||
3261 | case GIMPLE_SWITCH: | |
3262 | /* Take into account cost of the switch + guess 2 conditional jumps for | |
3263 | each case label. | |
3264 | ||
3265 | TODO: once the switch expansion logic is sufficiently separated, we can | |
3266 | do a better job of estimating the cost of the switch. */ |
02f0b13a JH |
3267 | if (weights->time_based) |
3268 | cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2; | |
3269 | else | |
3270 | cost = gimple_switch_num_labels (stmt) * 2; | |
6de9cd9a | 3271 | break; |
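/* For example (illustrative numbers): a switch with 16 labels is
   charged floor_log2 (16) * 2 == 8 when estimating time (roughly a
   balanced decision tree), but 16 * 2 == 32 when estimating size.  */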
726a989a RB |
3272 | |
3273 | case GIMPLE_CALL: | |
6de9cd9a | 3274 | { |
726a989a RB |
3275 | tree decl = gimple_call_fndecl (stmt); |
3276 | tree addr = gimple_call_fn (stmt); | |
8723e2fe JH |
3277 | tree funtype = TREE_TYPE (addr); |
3278 | ||
726a989a RB |
3279 | if (POINTER_TYPE_P (funtype)) |
3280 | funtype = TREE_TYPE (funtype); | |
6de9cd9a | 3281 | |
625a2efb | 3282 | if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD) |
726a989a | 3283 | cost = weights->target_builtin_call_cost; |
625a2efb | 3284 | else |
726a989a | 3285 | cost = weights->call_cost; |
625a2efb | 3286 | |
8c96cd51 | 3287 | if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL) |
6de9cd9a DN |
3288 | switch (DECL_FUNCTION_CODE (decl)) |
3289 | { | |
3290 | case BUILT_IN_CONSTANT_P: | |
726a989a | 3291 | return 0; |
6de9cd9a | 3292 | case BUILT_IN_EXPECT: |
02f0b13a | 3293 | return 0; |
726a989a | 3294 | |
7f9bc51b ZD |
3295 | /* Prefetch instruction is not expensive. */ |
3296 | case BUILT_IN_PREFETCH: | |
726a989a | 3297 | cost = weights->target_builtin_call_cost; |
7f9bc51b | 3298 | break; |
726a989a | 3299 | |
6de9cd9a DN |
3300 | default: |
3301 | break; | |
3302 | } | |
e5c4f28a | 3303 | |
8723e2fe JH |
3304 | if (decl) |
3305 | funtype = TREE_TYPE (decl); | |
3306 | ||
02f0b13a JH |
3307 | if (!VOID_TYPE_P (TREE_TYPE (funtype))) |
3308 | cost += estimate_move_cost (TREE_TYPE (funtype)); | |
726a989a RB |
3309 | /* Our cost must be kept in sync with |
3310 | cgraph_estimate_size_after_inlining, which uses the function |
3311 | declaration to figure out the arguments. */ |
8723e2fe JH |
3312 | if (decl && DECL_ARGUMENTS (decl)) |
3313 | { | |
3314 | tree arg; | |
3315 | for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg)) | |
078c3644 JH |
3316 | if (!VOID_TYPE_P (TREE_TYPE (arg))) |
3317 | cost += estimate_move_cost (TREE_TYPE (arg)); | |
8723e2fe JH |
3318 | } |
3319 | else if (funtype && prototype_p (funtype)) | |
3320 | { | |
3321 | tree t; | |
078c3644 JH |
3322 | for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node; |
3323 | t = TREE_CHAIN (t)) | |
3324 | if (!VOID_TYPE_P (TREE_VALUE (t))) | |
3325 | cost += estimate_move_cost (TREE_VALUE (t)); | |
8723e2fe JH |
3326 | } |
3327 | else | |
c7f599d0 | 3328 | { |
726a989a RB |
3329 | for (i = 0; i < gimple_call_num_args (stmt); i++) |
3330 | { | |
3331 | tree arg = gimple_call_arg (stmt, i); | |
078c3644 JH |
3332 | if (!VOID_TYPE_P (TREE_TYPE (arg))) |
3333 | cost += estimate_move_cost (TREE_TYPE (arg)); | |
726a989a | 3334 | } |
c7f599d0 | 3335 | } |
e5c4f28a | 3336 | |
6de9cd9a DN |
3337 | break; |
3338 | } | |
88f4034b | 3339 | |
726a989a RB |
3340 | case GIMPLE_GOTO: |
3341 | case GIMPLE_LABEL: | |
3342 | case GIMPLE_NOP: | |
3343 | case GIMPLE_PHI: | |
3344 | case GIMPLE_RETURN: | |
726a989a | 3345 | case GIMPLE_PREDICT: |
b5b8b0ac | 3346 | case GIMPLE_DEBUG: |
726a989a RB |
3347 | return 0; |
3348 | ||
3349 | case GIMPLE_ASM: | |
2bd1d2c8 | 3350 | return asm_str_count (gimple_asm_string (stmt)); |
726a989a | 3351 | |
1d65f45c RH |
3352 | case GIMPLE_RESX: |
3353 | /* This is either going to be an external function call with one | |
3354 | argument, or two register copy statements plus a goto. */ | |
3355 | return 2; | |
3356 | ||
3357 | case GIMPLE_EH_DISPATCH: | |
3358 | /* ??? This is going to turn into a switch statement. Ideally | |
3359 | we'd have a look at the eh region and estimate the number of | |
3360 | edges involved. */ | |
3361 | return 10; | |
3362 | ||
726a989a RB |
3363 | case GIMPLE_BIND: |
3364 | return estimate_num_insns_seq (gimple_bind_body (stmt), weights); | |
3365 | ||
3366 | case GIMPLE_EH_FILTER: | |
3367 | return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights); | |
3368 | ||
3369 | case GIMPLE_CATCH: | |
3370 | return estimate_num_insns_seq (gimple_catch_handler (stmt), weights); | |
3371 | ||
3372 | case GIMPLE_TRY: | |
3373 | return (estimate_num_insns_seq (gimple_try_eval (stmt), weights) | |
3374 | + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights)); | |
3375 | ||
3376 | /* OpenMP directives are generally very expensive. */ | |
3377 | ||
3378 | case GIMPLE_OMP_RETURN: | |
3379 | case GIMPLE_OMP_SECTIONS_SWITCH: | |
3380 | case GIMPLE_OMP_ATOMIC_STORE: | |
3381 | case GIMPLE_OMP_CONTINUE: | |
3382 | /* ...except these, which are cheap. */ | |
3383 | return 0; | |
3384 | ||
3385 | case GIMPLE_OMP_ATOMIC_LOAD: | |
3386 | return weights->omp_cost; | |
3387 | ||
3388 | case GIMPLE_OMP_FOR: | |
3389 | return (weights->omp_cost | |
3390 | + estimate_num_insns_seq (gimple_omp_body (stmt), weights) | |
3391 | + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights)); | |
3392 | ||
3393 | case GIMPLE_OMP_PARALLEL: | |
3394 | case GIMPLE_OMP_TASK: | |
3395 | case GIMPLE_OMP_CRITICAL: | |
3396 | case GIMPLE_OMP_MASTER: | |
3397 | case GIMPLE_OMP_ORDERED: | |
3398 | case GIMPLE_OMP_SECTION: | |
3399 | case GIMPLE_OMP_SECTIONS: | |
3400 | case GIMPLE_OMP_SINGLE: | |
3401 | return (weights->omp_cost | |
3402 | + estimate_num_insns_seq (gimple_omp_body (stmt), weights)); | |
88f4034b | 3403 | |
6de9cd9a | 3404 | default: |
1e128c5f | 3405 | gcc_unreachable (); |
6de9cd9a | 3406 | } |
726a989a RB |
3407 | |
3408 | return cost; | |
6de9cd9a DN |
3409 | } |
3410 | ||
726a989a RB |
3411 | /* Estimate number of instructions that will be created by expanding |
3412 | function FNDECL. WEIGHTS contains weights attributed to various | |
3413 | constructs. */ | |
aa4a53af | 3414 | |
6de9cd9a | 3415 | int |
726a989a | 3416 | estimate_num_insns_fn (tree fndecl, eni_weights *weights) |
6de9cd9a | 3417 | { |
726a989a RB |
3418 | struct function *my_function = DECL_STRUCT_FUNCTION (fndecl); |
3419 | gimple_stmt_iterator bsi; | |
e21aff8a | 3420 | basic_block bb; |
726a989a | 3421 | int n = 0; |
e21aff8a | 3422 | |
726a989a RB |
3423 | gcc_assert (my_function && my_function->cfg); |
3424 | FOR_EACH_BB_FN (bb, my_function) | |
e21aff8a | 3425 | { |
726a989a RB |
3426 | for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
3427 | n += estimate_num_insns (gsi_stmt (bsi), weights); | |
e21aff8a | 3428 | } |
e21aff8a | 3429 | |
726a989a | 3430 | return n; |
7f9bc51b ZD |
3431 | } |
3432 | ||
726a989a | 3433 | |
7f9bc51b ZD |
3434 | /* Initializes weights used by estimate_num_insns. */ |
3435 | ||
3436 | void | |
3437 | init_inline_once (void) | |
3438 | { | |
7f9bc51b | 3439 | eni_size_weights.call_cost = 1; |
625a2efb | 3440 | eni_size_weights.target_builtin_call_cost = 1; |
7f9bc51b | 3441 | eni_size_weights.div_mod_cost = 1; |
7f9bc51b | 3442 | eni_size_weights.omp_cost = 40; |
02f0b13a | 3443 | eni_size_weights.time_based = false; |
7f9bc51b ZD |
3444 | |
3445 | /* Estimating the time for a call is difficult, since we have no idea what the |
3446 | called function does. In the current uses of eni_time_weights, | |
3447 | underestimating the cost does less harm than overestimating it, so | |
ea2c620c | 3448 | we choose a rather small value here. */ |
7f9bc51b | 3449 | eni_time_weights.call_cost = 10; |
625a2efb | 3450 | eni_time_weights.target_builtin_call_cost = 10; |
7f9bc51b | 3451 | eni_time_weights.div_mod_cost = 10; |
7f9bc51b | 3452 | eni_time_weights.omp_cost = 40; |
02f0b13a | 3453 | eni_time_weights.time_based = true; |
6de9cd9a DN |
3454 | } |
3455 | ||
726a989a RB |
3456 | /* Estimate the number of instructions in a gimple_seq. */ |
3457 | ||
3458 | int | |
3459 | count_insns_seq (gimple_seq seq, eni_weights *weights) | |
3460 | { | |
3461 | gimple_stmt_iterator gsi; | |
3462 | int n = 0; | |
3463 | for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3464 | n += estimate_num_insns (gsi_stmt (gsi), weights); | |
3465 | ||
3466 | return n; | |
3467 | } | |
3468 | ||
3469 | ||
e21aff8a | 3470 | /* Install new lexical TREE_BLOCK underneath 'current_block'. */ |
726a989a | 3471 | |
e21aff8a | 3472 | static void |
4a283090 | 3473 | prepend_lexical_block (tree current_block, tree new_block) |
e21aff8a | 3474 | { |
4a283090 JH |
3475 | BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block); |
3476 | BLOCK_SUBBLOCKS (current_block) = new_block; | |
e21aff8a | 3477 | BLOCK_SUPERCONTEXT (new_block) = current_block; |
e21aff8a SB |
3478 | } |
3479 | ||
3e293154 MJ |
3480 | /* Fetch the callee declaration from the call graph edge going from NODE and |
3481 | associated with the call statement STMT. Return NULL_TREE if not found. */ |
3482 | static tree | |
726a989a | 3483 | get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt) |
3e293154 MJ |
3484 | { |
3485 | struct cgraph_edge *cs; | |
3486 | ||
3487 | cs = cgraph_edge (node, stmt); | |
3488 | if (cs) | |
3489 | return cs->callee->decl; | |
3490 | ||
3491 | return NULL_TREE; | |
3492 | } | |
3493 | ||
726a989a | 3494 | /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */ |
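/* Schematically (a hypothetical example), for

       a = foo (x);    where    int foo (int p) { return p + 1; }

   the block holding the call is split, FOO's body is copied between
   the halves, P is remapped to a new local initialized from X, the
   RETURN_EXPR becomes an assignment to a return variable, and the
   call statement is finally replaced by 'a = retvar'.  */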
d4e4baa9 | 3495 | |
e21aff8a | 3496 | static bool |
726a989a | 3497 | expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id) |
d4e4baa9 | 3498 | { |
1ea193c2 | 3499 | tree retvar, use_retvar; |
d436bff8 | 3500 | tree fn; |
b5b8b0ac | 3501 | struct pointer_map_t *st, *dst; |
110cfe1c | 3502 | tree return_slot; |
7740f00d | 3503 | tree modify_dest; |
6de9cd9a | 3504 | location_t saved_location; |
e21aff8a | 3505 | struct cgraph_edge *cg_edge; |
61a05df1 | 3506 | cgraph_inline_failed_t reason; |
e21aff8a SB |
3507 | basic_block return_block; |
3508 | edge e; | |
726a989a | 3509 | gimple_stmt_iterator gsi, stmt_gsi; |
e21aff8a | 3510 | bool successfully_inlined = FALSE; |
4f6c2131 | 3511 | bool purge_dead_abnormal_edges; |
e21aff8a SB |
3512 | tree t_step; |
3513 | tree var; | |
d4e4baa9 | 3514 | |
6de9cd9a DN |
3515 | /* Set input_location here so we get the right instantiation context |
3516 | if we call instantiate_decl from inlinable_function_p. */ | |
3517 | saved_location = input_location; | |
726a989a RB |
3518 | if (gimple_has_location (stmt)) |
3519 | input_location = gimple_location (stmt); | |
6de9cd9a | 3520 | |
d4e4baa9 | 3521 | /* From here on, we're only interested in CALL_EXPRs. */ |
726a989a | 3522 | if (gimple_code (stmt) != GIMPLE_CALL) |
6de9cd9a | 3523 | goto egress; |
d4e4baa9 AO |
3524 | |
3525 | /* First, see if we can figure out what function is being called. | |
3526 | If we cannot, then there is no hope of inlining the function. */ | |
726a989a | 3527 | fn = gimple_call_fndecl (stmt); |
d4e4baa9 | 3528 | if (!fn) |
3e293154 MJ |
3529 | { |
3530 | fn = get_indirect_callee_fndecl (id->dst_node, stmt); | |
3531 | if (!fn) | |
3532 | goto egress; | |
3533 | } | |
d4e4baa9 | 3534 | |
b58b1157 | 3535 | /* Turn forward declarations into real ones. */ |
d4d1ebc1 | 3536 | fn = cgraph_node (fn)->decl; |
b58b1157 | 3537 | |
726a989a | 3538 | /* If FN is a declaration of a function in a nested scope that was |
a1a0fd4e AO |
3539 | globally declared inline, we don't set its DECL_INITIAL. |
3540 | However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the | |
3541 | C++ front-end uses it for cdtors to refer to their internal | |
3542 | declarations, that are not real functions. Fortunately those | |
3543 | don't have trees to be saved, so we can tell by checking their | |
726a989a RB |
3544 | gimple_body. */ |
3545 | if (!DECL_INITIAL (fn) | |
a1a0fd4e | 3546 | && DECL_ABSTRACT_ORIGIN (fn) |
39ecc018 | 3547 | && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn))) |
a1a0fd4e AO |
3548 | fn = DECL_ABSTRACT_ORIGIN (fn); |
3549 | ||
18c6ada9 JH |
3550 | /* Objective-C and Fortran still call tree_rest_of_compilation directly. |
3551 | Kill this check once this is fixed. */ | |
1b369fae | 3552 | if (!id->dst_node->analyzed) |
6de9cd9a | 3553 | goto egress; |
18c6ada9 | 3554 | |
1b369fae | 3555 | cg_edge = cgraph_edge (id->dst_node, stmt); |
18c6ada9 | 3556 | |
f9417da1 RG |
3557 | /* Don't inline functions with different EH personalities. */ |
3558 | if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl) | |
3559 | && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl) | |
3560 | && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl) | |
3561 | != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))) | |
3562 | goto egress; | |
3563 | ||
d4e4baa9 AO |
3564 | /* Don't try to inline functions that are not well-suited to |
3565 | inlining. */ | |
e21aff8a | 3566 | if (!cgraph_inline_p (cg_edge, &reason)) |
a833faa5 | 3567 | { |
3e293154 MJ |
3568 | /* If this call was originally indirect, we do not want to emit any |
3569 | inlining related warnings or sorry messages because there are no | |
3570 | guarantees regarding those. */ | |
3571 | if (cg_edge->indirect_call) | |
3572 | goto egress; | |
3573 | ||
7fac66d4 JH |
3574 | if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)) |
3575 | /* Avoid warnings during early inline pass. */ | |
7e8b322a | 3576 | && cgraph_global_info_ready) |
2d327012 | 3577 | { |
61a05df1 JH |
3578 | sorry ("inlining failed in call to %q+F: %s", fn, |
3579 | cgraph_inline_failed_string (reason)); | |
2d327012 JH |
3580 | sorry ("called from here"); |
3581 | } | |
3582 | else if (warn_inline && DECL_DECLARED_INLINE_P (fn) | |
3583 | && !DECL_IN_SYSTEM_HEADER (fn) | |
61a05df1 | 3584 | && reason != CIF_UNSPECIFIED |
d63db217 JH |
3585 | && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)) |
3586 | /* Avoid warnings during early inline pass. */ | |
7e8b322a | 3587 | && cgraph_global_info_ready) |
a833faa5 | 3588 | { |
dee15844 | 3589 | warning (OPT_Winline, "inlining failed in call to %q+F: %s", |
61a05df1 | 3590 | fn, cgraph_inline_failed_string (reason)); |
3176a0c2 | 3591 | warning (OPT_Winline, "called from here"); |
a833faa5 | 3592 | } |
6de9cd9a | 3593 | goto egress; |
a833faa5 | 3594 | } |
ea99e0be | 3595 | fn = cg_edge->callee->decl; |
d4e4baa9 | 3596 | |
18c6ada9 | 3597 | #ifdef ENABLE_CHECKING |
1b369fae | 3598 | if (cg_edge->callee->decl != id->dst_node->decl) |
e21aff8a | 3599 | verify_cgraph_node (cg_edge->callee); |
18c6ada9 JH |
3600 | #endif |
3601 | ||
e21aff8a | 3602 | /* We will be inlining this callee. */ |
1d65f45c | 3603 | id->eh_lp_nr = lookup_stmt_eh_lp (stmt); |
e21aff8a | 3604 | |
f9417da1 RG |
3605 | /* Update the callers EH personality. */ |
3606 | if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)) | |
3607 | DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl) | |
3608 | = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl); | |
3609 | ||
726a989a | 3610 | /* Split the block holding the GIMPLE_CALL. */ |
e21aff8a SB |
3611 | e = split_block (bb, stmt); |
3612 | bb = e->src; | |
3613 | return_block = e->dest; | |
3614 | remove_edge (e); | |
3615 | ||
4f6c2131 EB |
3616 | /* split_block splits after the statement; work around this by |
3617 | moving the call into the second block manually. Not pretty, | |
3618 | but seems easier than doing the CFG manipulation by hand | |
726a989a RB |
3619 | when the GIMPLE_CALL is the last statement of BB. */ |
3620 | stmt_gsi = gsi_last_bb (bb); | |
3621 | gsi_remove (&stmt_gsi, false); | |
4f6c2131 | 3622 | |
726a989a | 3623 | /* If the GIMPLE_CALL was in the last statement of BB, it may have |
4f6c2131 EB |
3624 | been the source of abnormal edges. In this case, schedule |
3625 | the removal of dead abnormal edges. */ | |
726a989a RB |
3626 | gsi = gsi_start_bb (return_block); |
3627 | if (gsi_end_p (gsi)) | |
e21aff8a | 3628 | { |
726a989a | 3629 | gsi_insert_after (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 | 3630 | purge_dead_abnormal_edges = true; |
e21aff8a | 3631 | } |
4f6c2131 EB |
3632 | else |
3633 | { | |
726a989a | 3634 | gsi_insert_before (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 EB |
3635 | purge_dead_abnormal_edges = false; |
3636 | } | |
3637 | ||
726a989a | 3638 | stmt_gsi = gsi_start_bb (return_block); |
742a37d5 | 3639 | |
d436bff8 AH |
3640 | /* Build a block containing code to initialize the arguments, the |
3641 | actual inline expansion of the body, and a label for the return | |
3642 | statements within the function to jump to. The type of the | |
3643 | statement expression is the return type of the function call. */ | |
e21aff8a SB |
3644 | id->block = make_node (BLOCK); |
3645 | BLOCK_ABSTRACT_ORIGIN (id->block) = fn; | |
3e2844cb | 3646 | BLOCK_SOURCE_LOCATION (id->block) = input_location; |
4a283090 | 3647 | prepend_lexical_block (gimple_block (stmt), id->block); |
e21aff8a | 3648 | |
d4e4baa9 AO |
3649 | /* Local declarations will be replaced by their equivalents in this |
3650 | map. */ | |
3651 | st = id->decl_map; | |
6be42dd4 | 3652 | id->decl_map = pointer_map_create (); |
b5b8b0ac AO |
3653 | dst = id->debug_map; |
3654 | id->debug_map = NULL; | |
d4e4baa9 | 3655 | |
e21aff8a | 3656 | /* Record the function we are about to inline. */ |
1b369fae RH |
3657 | id->src_fn = fn; |
3658 | id->src_node = cg_edge->callee; | |
110cfe1c | 3659 | id->src_cfun = DECL_STRUCT_FUNCTION (fn); |
726a989a | 3660 | id->gimple_call = stmt; |
1b369fae | 3661 | |
3c8da8a5 AO |
3662 | gcc_assert (!id->src_cfun->after_inlining); |
3663 | ||
045685a9 | 3664 | id->entry_bb = bb; |
7299cb99 JH |
3665 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn))) |
3666 | { | |
3667 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
3668 | gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION, | |
3669 | NOT_TAKEN), | |
3670 | GSI_NEW_STMT); | |
3671 | } | |
726a989a | 3672 | initialize_inlined_parameters (id, stmt, fn, bb); |
d4e4baa9 | 3673 | |
ea99e0be | 3674 | if (DECL_INITIAL (fn)) |
4a283090 | 3675 | prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id)); |
acb8f212 | 3676 | |
d4e4baa9 AO |
3677 | /* Return statements in the function body will be replaced by jumps |
3678 | to the RET_LABEL. */ | |
1e128c5f GB |
3679 | gcc_assert (DECL_INITIAL (fn)); |
3680 | gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK); | |
23700f65 | 3681 | |
726a989a | 3682 | /* Find the LHS to which the result of this call is assigned. */ |
110cfe1c | 3683 | return_slot = NULL; |
726a989a | 3684 | if (gimple_call_lhs (stmt)) |
81bafd36 | 3685 | { |
726a989a | 3686 | modify_dest = gimple_call_lhs (stmt); |
81bafd36 ILT |
3687 | |
3688 | /* The function which we are inlining might not return a value, | |
3689 | in which case we should issue a warning that the function | |
3690 | does not return a value. In that case the optimizers will | |
3691 | see that the variable to which the value is assigned was not | |
3692 | initialized. We do not want to issue a warning about that | |
3693 | uninitialized variable. */ | |
3694 | if (DECL_P (modify_dest)) | |
3695 | TREE_NO_WARNING (modify_dest) = 1; | |
726a989a RB |
3696 | |
3697 | if (gimple_call_return_slot_opt_p (stmt)) | |
fa47911c | 3698 | { |
110cfe1c | 3699 | return_slot = modify_dest; |
fa47911c JM |
3700 | modify_dest = NULL; |
3701 | } | |
81bafd36 | 3702 | } |
7740f00d RH |
3703 | else |
3704 | modify_dest = NULL; | |
3705 | ||
1ea193c2 ILT |
3706 | /* If we are inlining a call to the C++ operator new, we don't want |
3707 | to use type based alias analysis on the return value. Otherwise | |
3708 | we may get confused if the compiler sees that the inlined new | |
3709 | function returns a pointer which was just deleted. See bug | |
3710 | 33407. */ | |
3711 | if (DECL_IS_OPERATOR_NEW (fn)) | |
3712 | { | |
3713 | return_slot = NULL; | |
3714 | modify_dest = NULL; | |
3715 | } | |
3716 | ||
d4e4baa9 | 3717 | /* Declare the return variable for the function. */ |
726a989a | 3718 | retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar); |
1ea193c2 | 3719 | |
acb8f212 | 3720 | /* Add local vars in this inlined callee to caller. */ |
cb91fab0 | 3721 | t_step = id->src_cfun->local_decls; |
acb8f212 JH |
3722 | for (; t_step; t_step = TREE_CHAIN (t_step)) |
3723 | { | |
3724 | var = TREE_VALUE (t_step); | |
3725 | if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var)) | |
eb50f5f4 | 3726 | { |
65401a0b | 3727 | if (var_ann (var) && add_referenced_var (var)) |
eb50f5f4 JH |
3728 | cfun->local_decls = tree_cons (NULL_TREE, var, |
3729 | cfun->local_decls); | |
3730 | } | |
526d73ab JH |
3731 | else if (!can_be_nonlocal (var, id)) |
3732 | cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id), | |
3733 | cfun->local_decls); | |
acb8f212 JH |
3734 | } |
3735 | ||
eb50f5f4 JH |
3736 | /* This is it. Duplicate the callee body. Assume callee is |
3737 | pre-gimplified. Note that we must not alter the caller | |
3738 | function in any way before this point, as this CALL_EXPR may be | |
3739 | a self-referential call; if we're calling ourselves, we need to | |
3740 | duplicate our body before altering anything. */ | |
3741 | copy_body (id, bb->count, bb->frequency, bb, return_block); | |
3742 | ||
6b8ed145 RG |
3743 | /* Reset the escaped and callused solutions. */ |
3744 | if (cfun->gimple_df) | |
3745 | { | |
3746 | pt_solution_reset (&cfun->gimple_df->escaped); | |
3747 | pt_solution_reset (&cfun->gimple_df->callused); | |
3748 | } | |
3749 | ||
d4e4baa9 | 3750 | /* Clean up. */ |
b5b8b0ac AO |
3751 | if (id->debug_map) |
3752 | { | |
3753 | pointer_map_destroy (id->debug_map); | |
3754 | id->debug_map = dst; | |
3755 | } | |
6be42dd4 | 3756 | pointer_map_destroy (id->decl_map); |
d4e4baa9 AO |
3757 | id->decl_map = st; |
3758 | ||
5006671f RG |
3759 | /* Unlink the call's virtual operands before replacing it. */ |
3760 | unlink_stmt_vdef (stmt); | |
3761 | ||
84936f6f | 3762 | /* If the inlined function returns a result that we care about, |
726a989a RB |
3763 | substitute the GIMPLE_CALL with an assignment of the return |
3764 | variable to the LHS of the call. That is, if STMT was | |
3765 | 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */ | |
3766 | if (use_retvar && gimple_call_lhs (stmt)) | |
e21aff8a | 3767 | { |
726a989a RB |
3768 | gimple old_stmt = stmt; |
3769 | stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar); | |
3770 | gsi_replace (&stmt_gsi, stmt, false); | |
110cfe1c | 3771 | if (gimple_in_ssa_p (cfun)) |
5006671f | 3772 | mark_symbols_for_renaming (stmt); |
726a989a | 3773 | maybe_clean_or_replace_eh_stmt (old_stmt, stmt); |
e21aff8a | 3774 | } |
6de9cd9a | 3775 | else |
110cfe1c | 3776 | { |
726a989a RB |
3777 | /* Handle the case of inlining a function with no return |
3778 | statement, which causes the return value to become undefined. */ | |
3779 | if (gimple_call_lhs (stmt) | |
3780 | && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME) | |
110cfe1c | 3781 | { |
726a989a RB |
3782 | tree name = gimple_call_lhs (stmt); |
3783 | tree var = SSA_NAME_VAR (name); | |
110cfe1c JH |
3784 | tree def = gimple_default_def (cfun, var); |
3785 | ||
110cfe1c JH |
3786 | if (def) |
3787 | { | |
726a989a RB |
3788 | /* If the variable is used undefined, make this name |
3789 | undefined via a move. */ | |
3790 | stmt = gimple_build_assign (gimple_call_lhs (stmt), def); | |
3791 | gsi_replace (&stmt_gsi, stmt, true); | |
110cfe1c | 3792 | } |
110cfe1c JH |
3793 | else |
3794 | { | |
726a989a RB |
3795 | /* Otherwise make this variable undefined. */ |
3796 | gsi_remove (&stmt_gsi, true); | |
110cfe1c | 3797 | set_default_def (var, name); |
726a989a | 3798 | SSA_NAME_DEF_STMT (name) = gimple_build_nop (); |
110cfe1c JH |
3799 | } |
3800 | } | |
3801 | else | |
726a989a | 3802 | gsi_remove (&stmt_gsi, true); |
110cfe1c | 3803 | } |
d4e4baa9 | 3804 | |
4f6c2131 | 3805 | if (purge_dead_abnormal_edges) |
726a989a | 3806 | gimple_purge_dead_abnormal_call_edges (return_block); |
84936f6f | 3807 | |
e21aff8a SB |
3808 | /* If the value of the new expression is ignored, that's OK. We |
3809 | don't warn about this for CALL_EXPRs, so we shouldn't warn about | |
3810 | the equivalent inlined version either. */ | |
726a989a RB |
3811 | if (is_gimple_assign (stmt)) |
3812 | { | |
3813 | gcc_assert (gimple_assign_single_p (stmt) | |
1a87cf0c | 3814 | || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))); |
726a989a RB |
3815 | TREE_USED (gimple_assign_rhs1 (stmt)) = 1; |
3816 | } | |
84936f6f | 3817 | |
1eb3331e DB |
3818 | /* Output the inlining info for this abstract function, since it has been |
3819 | inlined. If we don't do this now, we can lose the information about the | |
3820 | variables in the function when the blocks get blown away as soon as we | |
3821 | remove the cgraph node. */ | |
e21aff8a | 3822 | (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl); |
84936f6f | 3823 | |
e72fcfe8 | 3824 | /* Update callgraph if needed. */ |
e21aff8a | 3825 | cgraph_remove_node (cg_edge->callee); |
e72fcfe8 | 3826 | |
e21aff8a | 3827 | id->block = NULL_TREE; |
e21aff8a | 3828 | successfully_inlined = TRUE; |
742a37d5 | 3829 | |
6de9cd9a DN |
3830 | egress: |
3831 | input_location = saved_location; | |
e21aff8a | 3832 | return successfully_inlined; |
d4e4baa9 | 3833 | } |
6de9cd9a | 3834 | |
e21aff8a SB |
3835 | /* Expand call statements in basic block BB. |
3836 | We can only have CALL_EXPRs as the "toplevel" tree code or nested |
726a989a | 3837 | in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We |
e21aff8a SB |
3838 | unfortunately cannot use that function here because we need a pointer |
3839 | to the CALL_EXPR, not the tree itself. */ | |
3840 | ||
3841 | static bool | |
1b369fae | 3842 | gimple_expand_calls_inline (basic_block bb, copy_body_data *id) |
6de9cd9a | 3843 | { |
726a989a | 3844 | gimple_stmt_iterator gsi; |
6de9cd9a | 3845 | |
726a989a | 3846 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
6de9cd9a | 3847 | { |
726a989a | 3848 | gimple stmt = gsi_stmt (gsi); |
e21aff8a | 3849 | |
726a989a RB |
3850 | if (is_gimple_call (stmt) |
3851 | && expand_call_inline (bb, stmt, id)) | |
3852 | return true; | |
6de9cd9a | 3853 | } |
726a989a | 3854 | |
e21aff8a | 3855 | return false; |
6de9cd9a DN |
3856 | } |
3857 | ||
726a989a | 3858 | |
b8a00a4d JH |
3859 | /* Walk all basic blocks created after FIRST and try to fold every statement |
3860 | in the STATEMENTS pointer set. */ | |
726a989a | 3861 | |
b8a00a4d JH |
3862 | static void |
3863 | fold_marked_statements (int first, struct pointer_set_t *statements) | |
3864 | { | |
726a989a | 3865 | for (; first < n_basic_blocks; first++) |
b8a00a4d JH |
3866 | if (BASIC_BLOCK (first)) |
3867 | { | |
726a989a RB |
3868 | gimple_stmt_iterator gsi; |
3869 | ||
3870 | for (gsi = gsi_start_bb (BASIC_BLOCK (first)); | |
3871 | !gsi_end_p (gsi); | |
3872 | gsi_next (&gsi)) | |
3873 | if (pointer_set_contains (statements, gsi_stmt (gsi))) | |
9477eb38 | 3874 | { |
726a989a | 3875 | gimple old_stmt = gsi_stmt (gsi); |
4b685e14 | 3876 | tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0; |
2bafad93 | 3877 | |
44e10129 MM |
3878 | if (old_decl && DECL_BUILT_IN (old_decl)) |
3879 | { | |
3880 | /* Folding builtins can create multiple instructions; |
3881 | we need to look at all of them. */ |
3882 | gimple_stmt_iterator i2 = gsi; | |
3883 | gsi_prev (&i2); | |
3884 | if (fold_stmt (&gsi)) | |
3885 | { | |
3886 | gimple new_stmt; | |
3887 | if (gsi_end_p (i2)) | |
3888 | i2 = gsi_start_bb (BASIC_BLOCK (first)); | |
3889 | else | |
3890 | gsi_next (&i2); | |
3891 | while (1) | |
3892 | { | |
3893 | new_stmt = gsi_stmt (i2); | |
3894 | update_stmt (new_stmt); | |
3895 | cgraph_update_edges_for_call_stmt (old_stmt, old_decl, | |
3896 | new_stmt); | |
3897 | ||
3898 | if (new_stmt == gsi_stmt (gsi)) | |
3899 | { | |
3900 | /* It is okay to check only the very last |
3901 | of these statements. If it is a throwing |
3902 | statement nothing will change. If it isn't, |
3903 | this can remove EH edges. The only way |
3904 | that could be wrong is if some intermediate |
3905 | stmts threw while the last one did not; |
3906 | that would mean we'd have to split the |
3907 | block, which we can't do here and we'd |
3908 | lose anyway. And as builtins probably |
3909 | never throw, this all is moot anyway. */ |
3910 | if (maybe_clean_or_replace_eh_stmt (old_stmt, | |
3911 | new_stmt)) | |
3912 | gimple_purge_dead_eh_edges (BASIC_BLOCK (first)); | |
3913 | break; | |
3914 | } | |
3915 | gsi_next (&i2); | |
3916 | } | |
3917 | } | |
3918 | } | |
3919 | else if (fold_stmt (&gsi)) | |
9477eb38 | 3920 | { |
726a989a RB |
3921 | /* Re-read the statement from GSI as fold_stmt() may |
3922 | have changed it. */ | |
3923 | gimple new_stmt = gsi_stmt (gsi); | |
3924 | update_stmt (new_stmt); | |
3925 | ||
4b685e14 JH |
3926 | if (is_gimple_call (old_stmt) |
3927 | || is_gimple_call (new_stmt)) | |
44e10129 MM |
3928 | cgraph_update_edges_for_call_stmt (old_stmt, old_decl, |
3929 | new_stmt); | |
726a989a RB |
3930 | |
3931 | if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)) | |
3932 | gimple_purge_dead_eh_edges (BASIC_BLOCK (first)); | |
9477eb38 JH |
3933 | } |
3934 | } | |
b8a00a4d JH |
3935 | } |
3936 | } | |
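/* For example, folding 'n = __builtin_strlen ("abc")' replaces the
   GIMPLE_CALL with the assignment 'n = 3'; the code above then
   updates the call-graph edges and EH information for the affected
   block.  */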
3937 | ||
1084e689 JH |
3938 | /* Return true if BB has at least one abnormal outgoing edge. */ |
3939 | ||
3940 | static inline bool | |
3941 | has_abnormal_outgoing_edge_p (basic_block bb) | |
3942 | { | |
3943 | edge e; | |
3944 | edge_iterator ei; | |
3945 | ||
3946 | FOR_EACH_EDGE (e, ei, bb->succs) | |
3947 | if (e->flags & EDGE_ABNORMAL) | |
3948 | return true; | |
3949 | ||
3950 | return false; | |
3951 | } | |
3952 | ||
d4e4baa9 AO |
3953 | /* Expand calls to inline functions in the body of FN. */ |
3954 | ||
873aa8f5 | 3955 | unsigned int |
46c5ad27 | 3956 | optimize_inline_calls (tree fn) |
d4e4baa9 | 3957 | { |
1b369fae | 3958 | copy_body_data id; |
d4e4baa9 | 3959 | tree prev_fn; |
e21aff8a | 3960 | basic_block bb; |
b8a00a4d | 3961 | int last = n_basic_blocks; |
d406b663 JJ |
3962 | struct gimplify_ctx gctx; |
3963 | ||
c5b6f18e MM |
3964 | /* There is no point in performing inlining if errors have already |
3965 | occurred -- and we might crash if we try to inline invalid | |
3966 | code. */ | |
3967 | if (errorcount || sorrycount) | |
873aa8f5 | 3968 | return 0; |
c5b6f18e | 3969 | |
d4e4baa9 AO |
3970 | /* Clear out ID. */ |
3971 | memset (&id, 0, sizeof (id)); | |
3972 | ||
1b369fae RH |
3973 | id.src_node = id.dst_node = cgraph_node (fn); |
3974 | id.dst_fn = fn; | |
d4e4baa9 AO |
3975 | /* Or any functions that aren't finished yet. */ |
3976 | prev_fn = NULL_TREE; | |
3977 | if (current_function_decl) | |
3978 | { | |
1b369fae | 3979 | id.dst_fn = current_function_decl; |
d4e4baa9 AO |
3980 | prev_fn = current_function_decl; |
3981 | } | |
1b369fae RH |
3982 | |
3983 | id.copy_decl = copy_decl_maybe_to_var; | |
3984 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
3985 | id.transform_new_cfg = false; | |
3986 | id.transform_return_to_modify = true; | |
9ff420f1 | 3987 | id.transform_lang_insert_block = NULL; |
b8a00a4d | 3988 | id.statements_to_fold = pointer_set_create (); |
1b369fae | 3989 | |
d406b663 | 3990 | push_gimplify_context (&gctx); |
d4e4baa9 | 3991 | |
672987e8 ZD |
3992 | /* We make no attempts to keep dominance info up-to-date. */ |
3993 | free_dominance_info (CDI_DOMINATORS); | |
3994 | free_dominance_info (CDI_POST_DOMINATORS); | |
3995 | ||
726a989a RB |
3996 | /* Register specific gimple functions. */ |
3997 | gimple_register_cfg_hooks (); | |
3998 | ||
e21aff8a SB |
3999 | /* Reach the trees by walking over the CFG, and note the |
4000 | enclosing basic-blocks in the call edges. */ | |
4001 | /* We walk the blocks going forward, because inlined function bodies | |
4002 | will split id->current_basic_block, and the new blocks will | |
4003 | follow it; we'll trudge through them, processing their CALL_EXPRs | |
4004 | along the way. */ | |
4005 | FOR_EACH_BB (bb) | |
4006 | gimple_expand_calls_inline (bb, &id); | |
d4e4baa9 | 4007 | |
e21aff8a | 4008 | pop_gimplify_context (NULL); |
6de9cd9a | 4009 | |
18c6ada9 JH |
4010 | #ifdef ENABLE_CHECKING |
4011 | { | |
4012 | struct cgraph_edge *e; | |
4013 | ||
1b369fae | 4014 | verify_cgraph_node (id.dst_node); |
18c6ada9 JH |
4015 | |
4016 | /* Double check that we inlined everything we are supposed to inline. */ | |
1b369fae | 4017 | for (e = id.dst_node->callees; e; e = e->next_callee) |
1e128c5f | 4018 | gcc_assert (e->inline_failed); |
18c6ada9 JH |
4019 | } |
4020 | #endif | |
a9eafe81 AP |
4021 | |
4022 | /* Fold the statements before compacting/renumbering the basic blocks. */ | |
4023 | fold_marked_statements (last, id.statements_to_fold); | |
4024 | pointer_set_destroy (id.statements_to_fold); | |
4025 | ||
b5b8b0ac AO |
4026 | gcc_assert (!id.debug_stmts); |
4027 | ||
a9eafe81 AP |
4028 | /* Renumber the (code) basic_blocks consecutively. */ |
4029 | compact_blocks (); | |
4030 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ | |
4031 | number_blocks (fn); | |
b8a00a4d | 4032 | |
873aa8f5 | 4033 | fold_cond_expr_cond (); |
078c3644 JH |
4034 | delete_unreachable_blocks_update_callgraph (&id); |
4035 | #ifdef ENABLE_CHECKING | |
4036 | verify_cgraph_node (id.dst_node); | |
4037 | #endif | |
726a989a | 4038 | |
110cfe1c JH |
4039 | /* It would be nice to check SSA/CFG/statement consistency here, but it is |
4040 | not possible yet - the IPA passes might make various functions not |
4041 | throw, and they don't care to proactively update local EH info. This is |
4042 | done later in the fixup_cfg pass, which also executes the verification. */ |
726a989a RB |
4043 | return (TODO_update_ssa |
4044 | | TODO_cleanup_cfg | |
45a80bb9 JH |
4045 | | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0) |
4046 | | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0)); | |
d4e4baa9 AO |
4047 | } |
4048 | ||
d4e4baa9 AO |
4049 | /* Passed to walk_tree. Copies the node pointed to, if appropriate. */ |
4050 | ||
4051 | tree | |
46c5ad27 | 4052 | copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
d4e4baa9 AO |
4053 | { |
4054 | enum tree_code code = TREE_CODE (*tp); | |
07beea0d | 4055 | enum tree_code_class cl = TREE_CODE_CLASS (code); |
d4e4baa9 AO |
4056 | |
4057 | /* We make copies of most nodes. */ | |
07beea0d | 4058 | if (IS_EXPR_CODE_CLASS (cl) |
d4e4baa9 AO |
4059 | || code == TREE_LIST |
4060 | || code == TREE_VEC | |
8843c120 DN |
4061 | || code == TYPE_DECL |
4062 | || code == OMP_CLAUSE) | |
d4e4baa9 AO |
4063 | { |
4064 | /* Because the chain gets clobbered when we make a copy, we save it | |
4065 | here. */ | |
82d6e6fc | 4066 | tree chain = NULL_TREE, new_tree; |
07beea0d | 4067 | |
726a989a | 4068 | chain = TREE_CHAIN (*tp); |
d4e4baa9 AO |
4069 | |
4070 | /* Copy the node. */ | |
82d6e6fc | 4071 | new_tree = copy_node (*tp); |
6de9cd9a DN |
4072 | |
4073 | /* Propagate mudflap marked-ness. */ | |
4074 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 4075 | mf_mark (new_tree); |
6de9cd9a | 4076 | |
82d6e6fc | 4077 | *tp = new_tree; |
d4e4baa9 AO |
4078 | |
4079 | /* Now, restore the chain, if appropriate. That will cause | |
4080 | walk_tree to walk into the chain as well. */ | |
50674e96 DN |
4081 | if (code == PARM_DECL |
4082 | || code == TREE_LIST | |
aaf46ef9 | 4083 | || code == OMP_CLAUSE) |
d4e4baa9 AO |
4084 | TREE_CHAIN (*tp) = chain; |
4085 | ||
4086 | /* For now, we don't update BLOCKs when we make copies. So, we | |
6de9cd9a DN |
4087 | have to nullify all BIND_EXPRs. */ |
4088 | if (TREE_CODE (*tp) == BIND_EXPR) | |
4089 | BIND_EXPR_BLOCK (*tp) = NULL_TREE; | |
d4e4baa9 | 4090 | } |
4038c495 GB |
4091 | else if (code == CONSTRUCTOR) |
4092 | { | |
4093 | /* CONSTRUCTOR nodes need special handling because | |
4094 | we need to duplicate the vector of elements. */ | |
82d6e6fc | 4095 | tree new_tree; |
4038c495 | 4096 | |
82d6e6fc | 4097 | new_tree = copy_node (*tp); |
4038c495 GB |
4098 | |
4099 | /* Propagate mudflap marked-ness. */ | |
4100 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 4101 | mf_mark (new_tree); |
9f63daea | 4102 | |
82d6e6fc | 4103 | CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc, |
4038c495 | 4104 | CONSTRUCTOR_ELTS (*tp)); |
82d6e6fc | 4105 | *tp = new_tree; |
4038c495 | 4106 | } |
6615c446 | 4107 | else if (TREE_CODE_CLASS (code) == tcc_type) |
d4e4baa9 | 4108 | *walk_subtrees = 0; |
6615c446 | 4109 | else if (TREE_CODE_CLASS (code) == tcc_declaration) |
6de9cd9a | 4110 | *walk_subtrees = 0; |
a396f8ae GK |
4111 | else if (TREE_CODE_CLASS (code) == tcc_constant) |
4112 | *walk_subtrees = 0; | |
1e128c5f GB |
4113 | else |
4114 | gcc_assert (code != STATEMENT_LIST); | |
d4e4baa9 AO |
4115 | return NULL_TREE; |
4116 | } | |
4117 | ||
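/* Illustrative only (not part of the original source): copy_tree_r is
   meant to be used as a walk_tree callback.  A minimal sketch of
   deep-copying a GENERIC expression EXPR in place:

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   Afterwards EXPR shares no expression nodes with the original tree,
   although types, declarations and constants remain shared because
   copy_tree_r clears *WALK_SUBTREES for those classes.  */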
4118 | /* The SAVE_EXPR pointed to by TP is being copied. If ST contains | |
aa4a53af | 4119 | information indicating to what new SAVE_EXPR this one should be mapped, |
e21aff8a SB |
4120 | use that one. Otherwise, create a new node and enter it in ST. FN is |
4121 | the function into which the copy will be placed. */ | |
d4e4baa9 | 4122 | |
892c7e1e | 4123 | static void |
82c82743 | 4124 | remap_save_expr (tree *tp, void *st_, int *walk_subtrees) |
d4e4baa9 | 4125 | { |
6be42dd4 RG |
4126 | struct pointer_map_t *st = (struct pointer_map_t *) st_; |
4127 | tree *n; | |
5e20bdd7 | 4128 | tree t; |
d4e4baa9 AO |
4129 | |
4130 | /* See if we already encountered this SAVE_EXPR. */ | |
6be42dd4 | 4131 | n = (tree *) pointer_map_contains (st, *tp); |
d92b4486 | 4132 | |
d4e4baa9 AO |
4133 | /* If we didn't already remap this SAVE_EXPR, do so now. */ |
4134 | if (!n) | |
4135 | { | |
5e20bdd7 | 4136 | t = copy_node (*tp); |
d4e4baa9 | 4137 | |
d4e4baa9 | 4138 | /* Remember this SAVE_EXPR. */ |
6be42dd4 | 4139 | *pointer_map_insert (st, *tp) = t; |
350ebd54 | 4140 | /* Make sure we don't remap an already-remapped SAVE_EXPR. */ |
6be42dd4 | 4141 | *pointer_map_insert (st, t) = t; |
d4e4baa9 AO |
4142 | } |
4143 | else | |
5e20bdd7 JZ |
4144 | { |
4145 | /* We've already walked into this SAVE_EXPR; don't do it again. */ | |
4146 | *walk_subtrees = 0; | |
6be42dd4 | 4147 | t = *n; |
5e20bdd7 | 4148 | } |
d4e4baa9 AO |
4149 | |
4150 | /* Replace this SAVE_EXPR with the copy. */ | |
5e20bdd7 | 4151 | *tp = t; |
d4e4baa9 | 4152 | } |
d436bff8 | 4153 | |
aa4a53af RK |
4154 | /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4155 | copies the declaration and enters it in the decl map in DATA (which is
1b369fae | 4156 | really a `copy_body_data *'). */
6de9cd9a DN |
4157 | |
4158 | static tree | |
4159 | mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
4160 | void *data) | |
4161 | { | |
1b369fae | 4162 | copy_body_data *id = (copy_body_data *) data; |
6de9cd9a DN |
4163 | |
4164 | /* Don't walk into types. */ | |
350fae66 RK |
4165 | if (TYPE_P (*tp)) |
4166 | *walk_subtrees = 0; | |
6de9cd9a | 4167 | |
350fae66 | 4168 | else if (TREE_CODE (*tp) == LABEL_EXPR) |
6de9cd9a | 4169 | { |
350fae66 | 4170 | tree decl = TREE_OPERAND (*tp, 0); |
6de9cd9a | 4171 | |
350fae66 | 4172 | /* Copy the decl and remember the copy. */ |
1b369fae | 4173 | insert_decl_map (id, decl, id->copy_decl (decl, id)); |
6de9cd9a DN |
4174 | } |
4175 | ||
4176 | return NULL_TREE; | |
4177 | } | |
4178 | ||
19114537 EC |
4179 | /* Perform any modifications to EXPR required when it is unsaved. Does |
4180 | not recurse into EXPR's subtrees. */ | |
4181 | ||
4182 | static void | |
4183 | unsave_expr_1 (tree expr) | |
4184 | { | |
4185 | switch (TREE_CODE (expr)) | |
4186 | { | |
4187 | case TARGET_EXPR: | |
4188 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
4189 | It's OK for this to happen if it was part of a subtree that | |
4190 | isn't immediately expanded, such as operand 2 of another | |
4191 | TARGET_EXPR. */ | |
4192 | if (TREE_OPERAND (expr, 1)) | |
4193 | break; | |
4194 | ||
4195 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
4196 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
4197 | break; | |
4198 | ||
4199 | default: | |
4200 | break; | |
4201 | } | |
4202 | } | |
4203 | ||
6de9cd9a DN |
4204 | /* Called via walk_tree when an expression is unsaved. Using the
4205 | decl map pointed to by ST (which is really a `pointer_map_t *'),
4206 | remaps all local declarations to appropriate replacements. */
d436bff8 AH |
4207 | |
4208 | static tree | |
6de9cd9a | 4209 | unsave_r (tree *tp, int *walk_subtrees, void *data) |
d436bff8 | 4210 | { |
1b369fae | 4211 | copy_body_data *id = (copy_body_data *) data; |
6be42dd4 RG |
4212 | struct pointer_map_t *st = id->decl_map; |
4213 | tree *n; | |
6de9cd9a DN |
4214 | |
4215 | /* Only a local declaration (variable or label). */ | |
4216 | if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp)) | |
4217 | || TREE_CODE (*tp) == LABEL_DECL) | |
4218 | { | |
4219 | /* Lookup the declaration. */ | |
6be42dd4 | 4220 | n = (tree *) pointer_map_contains (st, *tp); |
9f63daea | 4221 | |
6de9cd9a DN |
4222 | /* If it's there, remap it. */ |
4223 | if (n) | |
6be42dd4 | 4224 | *tp = *n; |
6de9cd9a | 4225 | } |
aa4a53af | 4226 | |
6de9cd9a | 4227 | else if (TREE_CODE (*tp) == STATEMENT_LIST) |
726a989a | 4228 | gcc_unreachable (); |
6de9cd9a DN |
4229 | else if (TREE_CODE (*tp) == BIND_EXPR) |
4230 | copy_bind_expr (tp, walk_subtrees, id); | |
a406865a RG |
4231 | else if (TREE_CODE (*tp) == SAVE_EXPR |
4232 | || TREE_CODE (*tp) == TARGET_EXPR) | |
82c82743 | 4233 | remap_save_expr (tp, st, walk_subtrees); |
d436bff8 | 4234 | else |
6de9cd9a DN |
4235 | { |
4236 | copy_tree_r (tp, walk_subtrees, NULL); | |
4237 | ||
4238 | /* Do whatever unsaving is required. */ | |
4239 | unsave_expr_1 (*tp); | |
4240 | } | |
4241 | ||
4242 | /* Keep iterating. */ | |
4243 | return NULL_TREE; | |
d436bff8 AH |
4244 | } |
4245 | ||
19114537 EC |
4246 | /* Copies everything in EXPR and replaces variables, labels |
4247 | and SAVE_EXPRs local to EXPR. */ | |
6de9cd9a DN |
4248 | |
4249 | tree | |
19114537 | 4250 | unsave_expr_now (tree expr) |
6de9cd9a | 4251 | { |
1b369fae | 4252 | copy_body_data id; |
6de9cd9a DN |
4253 | |
4254 | /* There's nothing to do for NULL_TREE. */ | |
4255 | if (expr == 0) | |
4256 | return expr; | |
4257 | ||
4258 | /* Set up ID. */ | |
4259 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
4260 | id.src_fn = current_function_decl; |
4261 | id.dst_fn = current_function_decl; | |
6be42dd4 | 4262 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 4263 | id.debug_map = NULL; |
6de9cd9a | 4264 | |
1b369fae RH |
4265 | id.copy_decl = copy_decl_no_change; |
4266 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4267 | id.transform_new_cfg = false; | |
4268 | id.transform_return_to_modify = false; | |
9ff420f1 | 4269 | id.transform_lang_insert_block = NULL; |
1b369fae | 4270 | |
6de9cd9a DN |
4271 | /* Walk the tree once to find local labels. */ |
4272 | walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id); | |
4273 | ||
4274 | /* Walk the tree again, copying, remapping, and unsaving. */ | |
4275 | walk_tree (&expr, unsave_r, &id, NULL); | |
4276 | ||
4277 | /* Clean up. */ | |
6be42dd4 | 4278 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
4279 | if (id.debug_map) |
4280 | pointer_map_destroy (id.debug_map); | |
6de9cd9a DN |
4281 | |
4282 | return expr; | |
4283 | } | |
4284 | ||
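/* Illustrative only: a hypothetical caller of unsave_expr_now.  The
   function rewrites EXPR in place and returns it, with the labels,
   local variables and SAVE_EXPRs it contains replaced by fresh copies:

     expr = unsave_expr_now (expr);  */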
726a989a RB |
4285 | /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4286 | label, copies the declaration and enters it in the decl map in DATA (which
4287 | is really a `copy_body_data *'). */
4288 | ||
4289 | static tree | |
4290 | mark_local_labels_stmt (gimple_stmt_iterator *gsip, | |
4291 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4292 | struct walk_stmt_info *wi) | |
4293 | { | |
4294 | copy_body_data *id = (copy_body_data *) wi->info; | |
4295 | gimple stmt = gsi_stmt (*gsip); | |
4296 | ||
4297 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
4298 | { | |
4299 | tree decl = gimple_label_label (stmt); | |
4300 | ||
4301 | /* Copy the decl and remember the copy. */ | |
4302 | insert_decl_map (id, decl, id->copy_decl (decl, id)); | |
4303 | } | |
4304 | ||
4305 | return NULL_TREE; | |
4306 | } | |
4307 | ||
4308 | ||
4309 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4310 | Using the decl map pointed to by ST (which is really a `pointer_map_t *'),
4311 | remaps all local declarations to appropriate replacements in gimple
4312 | operands. */
4313 | ||
4314 | static tree | |
4315 | replace_locals_op (tree *tp, int *walk_subtrees, void *data) | |
4316 | { | |
4317 | struct walk_stmt_info *wi = (struct walk_stmt_info*) data; | |
4318 | copy_body_data *id = (copy_body_data *) wi->info; | |
4319 | struct pointer_map_t *st = id->decl_map; | |
4320 | tree *n; | |
4321 | tree expr = *tp; | |
4322 | ||
4323 | /* Only a local declaration (variable or label). */ | |
4324 | if ((TREE_CODE (expr) == VAR_DECL | |
4325 | && !TREE_STATIC (expr)) | |
4326 | || TREE_CODE (expr) == LABEL_DECL) | |
4327 | { | |
4328 | /* Lookup the declaration. */ | |
4329 | n = (tree *) pointer_map_contains (st, expr); | |
4330 | ||
4331 | /* If it's there, remap it. */ | |
4332 | if (n) | |
4333 | *tp = *n; | |
4334 | *walk_subtrees = 0; | |
4335 | } | |
4336 | else if (TREE_CODE (expr) == STATEMENT_LIST | |
4337 | || TREE_CODE (expr) == BIND_EXPR | |
4338 | || TREE_CODE (expr) == SAVE_EXPR) | |
4339 | gcc_unreachable (); | |
4340 | else if (TREE_CODE (expr) == TARGET_EXPR) | |
4341 | { | |
4342 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
4343 | It's OK for this to happen if it was part of a subtree that | |
4344 | isn't immediately expanded, such as operand 2 of another | |
4345 | TARGET_EXPR. */ | |
4346 | if (!TREE_OPERAND (expr, 1)) | |
4347 | { | |
4348 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
4349 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
4350 | } | |
4351 | } | |
4352 | ||
4353 | /* Keep iterating. */ | |
4354 | return NULL_TREE; | |
4355 | } | |
4356 | ||
4357 | ||
4358 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4359 | Using the decl map in the `copy_body_data' in WI->info,
4360 | remaps all local declarations to appropriate replacements in gimple
4361 | statements. */
4362 | ||
4363 | static tree | |
4364 | replace_locals_stmt (gimple_stmt_iterator *gsip, | |
4365 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4366 | struct walk_stmt_info *wi) | |
4367 | { | |
4368 | copy_body_data *id = (copy_body_data *) wi->info; | |
4369 | gimple stmt = gsi_stmt (*gsip); | |
4370 | ||
4371 | if (gimple_code (stmt) == GIMPLE_BIND) | |
4372 | { | |
4373 | tree block = gimple_bind_block (stmt); | |
4374 | ||
4375 | if (block) | |
4376 | { | |
4377 | remap_block (&block, id); | |
4378 | gimple_bind_set_block (stmt, block); | |
4379 | } | |
4380 | ||
4381 | /* This will remap a lot of the same decls again, but this should be | |
4382 | harmless. */ | |
4383 | if (gimple_bind_vars (stmt)) | |
526d73ab | 4384 | gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id)); |
726a989a RB |
4385 | } |
4386 | ||
4387 | /* Keep iterating. */ | |
4388 | return NULL_TREE; | |
4389 | } | |
4390 | ||
4391 | ||
4392 | /* Copies everything in SEQ and replaces variables and labels local to | |
4393 | current_function_decl. */ | |
4394 | ||
4395 | gimple_seq | |
4396 | copy_gimple_seq_and_replace_locals (gimple_seq seq) | |
4397 | { | |
4398 | copy_body_data id; | |
4399 | struct walk_stmt_info wi; | |
4400 | struct pointer_set_t *visited; | |
4401 | gimple_seq copy; | |
4402 | ||
4403 | /* There's nothing to do for a NULL sequence. */
4404 | if (seq == NULL) | |
4405 | return seq; | |
4406 | ||
4407 | /* Set up ID. */ | |
4408 | memset (&id, 0, sizeof (id)); | |
4409 | id.src_fn = current_function_decl; | |
4410 | id.dst_fn = current_function_decl; | |
4411 | id.decl_map = pointer_map_create (); | |
b5b8b0ac | 4412 | id.debug_map = NULL; |
726a989a RB |
4413 | |
4414 | id.copy_decl = copy_decl_no_change; | |
4415 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4416 | id.transform_new_cfg = false; | |
4417 | id.transform_return_to_modify = false; | |
4418 | id.transform_lang_insert_block = NULL; | |
4419 | ||
4420 | /* Walk the tree once to find local labels. */ | |
4421 | memset (&wi, 0, sizeof (wi)); | |
4422 | visited = pointer_set_create (); | |
4423 | wi.info = &id; | |
4424 | wi.pset = visited; | |
4425 | walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi); | |
4426 | pointer_set_destroy (visited); | |
4427 | ||
4428 | copy = gimple_seq_copy (seq); | |
4429 | ||
4430 | /* Walk the copy, remapping decls. */ | |
4431 | memset (&wi, 0, sizeof (wi)); | |
4432 | wi.info = &id; | |
4433 | walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi); | |
4434 | ||
4435 | /* Clean up. */ | |
4436 | pointer_map_destroy (id.decl_map); | |
b5b8b0ac AO |
4437 | if (id.debug_map) |
4438 | pointer_map_destroy (id.debug_map); | |
726a989a RB |
4439 | |
4440 | return copy; | |
4441 | } | |
4442 | ||
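/* Illustrative only: a hypothetical use, duplicating the body of a
   GIMPLE_BIND without sharing its local state:

     gimple_seq body = gimple_bind_body (bind_stmt);
     gimple_seq copy = copy_gimple_seq_and_replace_locals (body);

   COPY then contains fresh statements whose local VAR_DECLs and
   LABEL_DECLs have been remapped to new copies belonging to
   current_function_decl.  */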
4443 | ||
6de9cd9a | 4444 | /* Allow someone to determine if SEARCH is a child of TOP from gdb. */ |
aa4a53af | 4445 | |
6de9cd9a DN |
4446 | static tree |
4447 | debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data) | |
4448 | { | |
4449 | if (*tp == data) | |
4450 | return (tree) data; | |
4451 | else | |
4452 | return NULL; | |
4453 | } | |
4454 | ||
6de9cd9a DN |
4455 | bool |
4456 | debug_find_tree (tree top, tree search) | |
4457 | { | |
4458 | return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0; | |
4459 | } | |
4460 | ||
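/* Illustrative only: debug_find_tree is meant to be called by hand from
   a debugger, e.g. a hypothetical gdb session:

     (gdb) call debug_find_tree (top, search)
     $1 = true

   where TOP and SEARCH are trees visible in the current frame.  */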
e21aff8a | 4461 | |
6de9cd9a DN |
4462 | /* Declare the variables created by the inliner. Add all the variables in
4463 | VARS to BLOCK, if any, and to the current function's local decls. */
4464 | ||
4465 | static void | |
e21aff8a | 4466 | declare_inline_vars (tree block, tree vars) |
6de9cd9a | 4467 | { |
84936f6f RH |
4468 | tree t; |
4469 | for (t = vars; t; t = TREE_CHAIN (t)) | |
9659ce8b JH |
4470 | { |
4471 | DECL_SEEN_IN_BIND_EXPR_P (t) = 1; | |
4472 | gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t)); | |
cb91fab0 | 4473 | cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls); |
9659ce8b | 4474 | } |
6de9cd9a | 4475 | |
e21aff8a SB |
4476 | if (block) |
4477 | BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars); | |
4478 | } | |
4479 | ||
19734dd8 | 4480 | /* Finish the copy COPY of declaration DECL. The DECL originally was in
1b369fae RH |
4481 | ID->src_fn, but now it will be in ID->dst_fn. Fix up the debug info,
4482 | RTL and context of the copy accordingly. */
19734dd8 | 4483 | |
1b369fae RH |
4484 | static tree |
4485 | copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy) | |
19734dd8 | 4486 | { |
19734dd8 RL |
4487 | /* Don't generate debug information for the copy if we wouldn't have |
4488 | generated it for the original either. */
4489 | DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl); | |
4490 | DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl); | |
4491 | ||
4492 | /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what | |
4493 | declaration inspired this copy. */ | |
4494 | DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl); | |
4495 | ||
4496 | /* The new variable/label has no RTL, yet. */ | |
68a976f2 RL |
4497 | if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL) |
4498 | && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy)) | |
19734dd8 RL |
4499 | SET_DECL_RTL (copy, NULL_RTX); |
4500 | ||
4501 | /* These args would always appear unused, if not for this. */ | |
4502 | TREE_USED (copy) = 1; | |
4503 | ||
4504 | /* Set the context for the new declaration. */ | |
4505 | if (!DECL_CONTEXT (decl)) | |
4506 | /* Globals stay global. */ | |
4507 | ; | |
1b369fae | 4508 | else if (DECL_CONTEXT (decl) != id->src_fn) |
19734dd8 RL |
4509 | /* Things that weren't in the scope of the function we're inlining |
4510 | from aren't in the scope we're inlining to, either. */ | |
4511 | ; | |
4512 | else if (TREE_STATIC (decl)) | |
4513 | /* Function-scoped static variables should stay in the original | |
4514 | function. */ | |
4515 | ; | |
4516 | else | |
4517 | /* Ordinary automatic local variables are now in the scope of the | |
4518 | new function. */ | |
1b369fae | 4519 | DECL_CONTEXT (copy) = id->dst_fn; |
19734dd8 RL |
4520 | |
4521 | return copy; | |
4522 | } | |
4523 | ||
1b369fae RH |
4524 | static tree |
4525 | copy_decl_to_var (tree decl, copy_body_data *id) | |
4526 | { | |
4527 | tree copy, type; | |
4528 | ||
4529 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4530 | || TREE_CODE (decl) == RESULT_DECL); | |
4531 | ||
4532 | type = TREE_TYPE (decl); | |
4533 | ||
c2255bc4 AH |
4534 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4535 | VAR_DECL, DECL_NAME (decl), type); | |
1b369fae RH |
4536 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); |
4537 | TREE_READONLY (copy) = TREE_READONLY (decl); | |
4538 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
0890b981 | 4539 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
1b369fae RH |
4540 | |
4541 | return copy_decl_for_dup_finish (id, decl, copy); | |
4542 | } | |
4543 | ||
c08cd4c1 JM |
4544 | /* Like copy_decl_to_var, but create a return slot object instead of a |
4545 | pointer variable for return by invisible reference. */ | |
4546 | ||
4547 | static tree | |
4548 | copy_result_decl_to_var (tree decl, copy_body_data *id) | |
4549 | { | |
4550 | tree copy, type; | |
4551 | ||
4552 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4553 | || TREE_CODE (decl) == RESULT_DECL); | |
4554 | ||
4555 | type = TREE_TYPE (decl); | |
4556 | if (DECL_BY_REFERENCE (decl)) | |
4557 | type = TREE_TYPE (type); | |
4558 | ||
c2255bc4 AH |
4559 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4560 | VAR_DECL, DECL_NAME (decl), type); | |
c08cd4c1 JM |
4561 | TREE_READONLY (copy) = TREE_READONLY (decl); |
4562 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
4563 | if (!DECL_BY_REFERENCE (decl)) | |
4564 | { | |
4565 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); | |
0890b981 | 4566 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
c08cd4c1 JM |
4567 | } |
4568 | ||
4569 | return copy_decl_for_dup_finish (id, decl, copy); | |
4570 | } | |
4571 | ||
9ff420f1 | 4572 | tree |
1b369fae RH |
4573 | copy_decl_no_change (tree decl, copy_body_data *id) |
4574 | { | |
4575 | tree copy; | |
4576 | ||
4577 | copy = copy_node (decl); | |
4578 | ||
4579 | /* The COPY is not abstract; it will be generated in DST_FN. */ | |
4580 | DECL_ABSTRACT (copy) = 0; | |
4581 | lang_hooks.dup_lang_specific_decl (copy); | |
4582 | ||
4583 | /* TREE_ADDRESSABLE isn't used to indicate that a label's address has | |
4584 | been taken; it's for internal bookkeeping in expand_goto_internal. */ | |
4585 | if (TREE_CODE (copy) == LABEL_DECL) | |
4586 | { | |
4587 | TREE_ADDRESSABLE (copy) = 0; | |
4588 | LABEL_DECL_UID (copy) = -1; | |
4589 | } | |
4590 | ||
4591 | return copy_decl_for_dup_finish (id, decl, copy); | |
4592 | } | |
4593 | ||
4594 | static tree | |
4595 | copy_decl_maybe_to_var (tree decl, copy_body_data *id) | |
4596 | { | |
4597 | if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL) | |
4598 | return copy_decl_to_var (decl, id); | |
4599 | else | |
4600 | return copy_decl_no_change (decl, id); | |
4601 | } | |
4602 | ||
19734dd8 RL |
4603 | /* Return a copy of the function's argument tree. */ |
4604 | static tree | |
c6f7cfc1 JH |
4605 | copy_arguments_for_versioning (tree orig_parm, copy_body_data * id, |
4606 | bitmap args_to_skip, tree *vars) | |
19734dd8 | 4607 | { |
c6f7cfc1 JH |
4608 | tree arg, *parg; |
4609 | tree new_parm = NULL; | |
4610 | int i = 0; | |
19734dd8 | 4611 | |
c6f7cfc1 JH |
4612 | parg = &new_parm; |
4613 | ||
4614 | for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++) | |
4615 | if (!args_to_skip || !bitmap_bit_p (args_to_skip, i)) | |
4616 | { | |
4617 | tree new_tree = remap_decl (arg, id); | |
4618 | lang_hooks.dup_lang_specific_decl (new_tree); | |
4619 | *parg = new_tree; | |
4620 | parg = &TREE_CHAIN (new_tree); | |
4621 | } | |
eb50f5f4 | 4622 | else if (!pointer_map_contains (id->decl_map, arg)) |
c6f7cfc1 JH |
4623 | { |
4624 | /* Make an equivalent VAR_DECL. If the argument was used | |
4625 | as a temporary variable later in the function, the uses will be
4626 | replaced by the local variable. */
4627 | tree var = copy_decl_to_var (arg, id); | |
4628 | get_var_ann (var); | |
4629 | add_referenced_var (var); | |
4630 | insert_decl_map (id, arg, var); | |
4631 | /* Declare this new variable. */ | |
4632 | TREE_CHAIN (var) = *vars; | |
4633 | *vars = var; | |
4634 | } | |
4635 | return new_parm; | |
19734dd8 RL |
4636 | } |
4637 | ||
4638 | /* Return a copy of the function's static chain. */ | |
4639 | static tree | |
1b369fae | 4640 | copy_static_chain (tree static_chain, copy_body_data * id) |
19734dd8 RL |
4641 | { |
4642 | tree *chain_copy, *pvar; | |
4643 | ||
4644 | chain_copy = &static_chain; | |
4645 | for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar)) | |
4646 | { | |
82d6e6fc KG |
4647 | tree new_tree = remap_decl (*pvar, id); |
4648 | lang_hooks.dup_lang_specific_decl (new_tree); | |
4649 | TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar); | |
4650 | *pvar = new_tree; | |
19734dd8 RL |
4651 | } |
4652 | return static_chain; | |
4653 | } | |
4654 | ||
4655 | /* Return true if the function is allowed to be versioned. | |
4656 | This is a guard for the versioning functionality. */ | |
27dbd3ac | 4657 | |
19734dd8 RL |
4658 | bool |
4659 | tree_versionable_function_p (tree fndecl) | |
4660 | { | |
86631ea3 MJ |
4661 | return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl)) |
4662 | && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL); | |
19734dd8 RL |
4663 | } |
4664 | ||
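/* Illustrative only: a function can opt out of versioning from source
   with the "noclone" attribute (hypothetical user code):

     int f (int) __attribute__ ((noclone));

   lookup_attribute then finds "noclone" in DECL_ATTRIBUTES (f) and the
   predicate above returns false for it.  */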
9187e02d JH |
4665 | /* Delete all unreachable basic blocks and update callgraph. |
4666 | Doing so is somewhat nontrivial because we need to update all clones and | |
4667 | remove inline functions that become unreachable. */
9f5e9983 | 4668 | |
9187e02d JH |
4669 | static bool |
4670 | delete_unreachable_blocks_update_callgraph (copy_body_data *id) | |
9f5e9983 | 4671 | { |
9187e02d JH |
4672 | bool changed = false; |
4673 | basic_block b, next_bb; | |
4674 | ||
4675 | find_unreachable_blocks (); | |
4676 | ||
4677 | /* Delete all unreachable basic blocks. */ | |
4678 | ||
4679 | for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb) | |
4680 | { | |
4681 | next_bb = b->next_bb; | |
4682 | ||
4683 | if (!(b->flags & BB_REACHABLE)) | |
4684 | { | |
4685 | gimple_stmt_iterator bsi; | |
4686 | ||
4687 | for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi)) | |
4688 | if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL) | |
4689 | { | |
4690 | struct cgraph_edge *e; | |
4691 | struct cgraph_node *node; | |
4692 | ||
4693 | if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL) | |
4694 | { | |
4695 | if (!e->inline_failed) | |
4696 | cgraph_remove_node_and_inline_clones (e->callee); | |
4697 | else | |
4698 | cgraph_remove_edge (e); | |
4699 | } | |
4700 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES | |
4701 | && id->dst_node->clones) | |
4702 | for (node = id->dst_node->clones; node != id->dst_node;) | |
4703 | { | |
4704 | if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL) | |
4705 | { | |
4706 | if (!e->inline_failed) | |
4707 | cgraph_remove_node_and_inline_clones (e->callee); | |
4708 | else | |
4709 | cgraph_remove_edge (e); | |
4710 | } | |
4711 | ||
4712 | if (node->clones) | |
4713 | node = node->clones; | |
4714 | else if (node->next_sibling_clone) | |
4715 | node = node->next_sibling_clone; | |
4716 | else | |
4717 | { | |
4718 | while (node != id->dst_node && !node->next_sibling_clone) | |
4719 | node = node->clone_of; | |
4720 | if (node != id->dst_node) | |
4721 | node = node->next_sibling_clone; | |
4722 | } | |
4723 | } | |
4724 | } | |
4725 | delete_basic_block (b); | |
4726 | changed = true; | |
4727 | } | |
4728 | } | |
4729 | ||
4730 | if (changed) | |
4731 | tidy_fallthru_edges (); | |
4732 | #ifdef ENABLE_CHECKING
4733 | verify_cgraph_node (id->dst_node); | |
4734 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES | |
4735 | && id->dst_node->clones) | |
4736 | { | |
4737 | struct cgraph_node *node; | |
4738 | for (node = id->dst_node->clones; node != id->dst_node;) | |
4739 | { | |
4740 | verify_cgraph_node (node); | |
4741 | ||
4742 | if (node->clones) | |
4743 | node = node->clones; | |
4744 | else if (node->next_sibling_clone) | |
4745 | node = node->next_sibling_clone; | |
4746 | else | |
4747 | { | |
4748 | while (node != id->dst_node && !node->next_sibling_clone) | |
4749 | node = node->clone_of; | |
4750 | if (node != id->dst_node) | |
4751 | node = node->next_sibling_clone; | |
4752 | } | |
4753 | } | |
4754 | } | |
9f5e9983 | 4755 | #endif |
9187e02d | 4756 | return changed; |
9f5e9983 JJ |
4757 | } |
4758 | ||
08ad1d6d JH |
4759 | /* Update clone info after duplication. */ |
4760 | ||
4761 | static void | |
4762 | update_clone_info (copy_body_data * id) | |
4763 | { | |
4764 | struct cgraph_node *node; | |
4765 | if (!id->dst_node->clones) | |
4766 | return; | |
4767 | for (node = id->dst_node->clones; node != id->dst_node;) | |
4768 | { | |
4769 | /* First update replace maps to match the new body. */ | |
4770 | if (node->clone.tree_map) | |
4771 | { | |
4772 | unsigned int i; | |
4773 | for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++) | |
4774 | { | |
4775 | struct ipa_replace_map *replace_info; | |
4776 | replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i); | |
4777 | walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL); | |
4778 | walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL); | |
4779 | } | |
4780 | } | |
4781 | if (node->clones) | |
4782 | node = node->clones; | |
4783 | else if (node->next_sibling_clone) | |
4784 | node = node->next_sibling_clone; | |
4785 | else | |
4786 | { | |
4787 | while (node != id->dst_node && !node->next_sibling_clone) | |
4788 | node = node->clone_of; | |
4789 | if (node != id->dst_node) | |
4790 | node = node->next_sibling_clone; | |
4791 | } | |
4792 | } | |
4793 | } | |
4794 | ||
19734dd8 RL |
4795 | /* Create a copy of a function's tree. |
4796 | OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes | |
4797 | of the original function and the new copied function | |
4798 | respectively. In case we want to replace a DECL | |
4799 | tree with another tree while duplicating the function's | |
4800 | body, TREE_MAP represents the mapping between these | |
ea99e0be JH |
4801 | trees. If UPDATE_CLONES is set, the call_stmt fields of edges of clones
4802 | of the function will be updated; ARGS_TO_SKIP is a bitmap of parameters to omit. */
19734dd8 | 4803 | void |
27dbd3ac RH |
4804 | tree_function_versioning (tree old_decl, tree new_decl, |
4805 | VEC(ipa_replace_map_p,gc)* tree_map, | |
c6f7cfc1 | 4806 | bool update_clones, bitmap args_to_skip) |
19734dd8 RL |
4807 | { |
4808 | struct cgraph_node *old_version_node; | |
4809 | struct cgraph_node *new_version_node; | |
1b369fae | 4810 | copy_body_data id; |
110cfe1c | 4811 | tree p; |
19734dd8 RL |
4812 | unsigned i; |
4813 | struct ipa_replace_map *replace_info; | |
b5b8b0ac | 4814 | basic_block old_entry_block, bb; |
0f1961a2 JH |
4815 | VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10); |
4816 | ||
19734dd8 | 4817 | tree t_step; |
873aa8f5 | 4818 | tree old_current_function_decl = current_function_decl; |
0f1961a2 | 4819 | tree vars = NULL_TREE; |
19734dd8 RL |
4820 | |
4821 | gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL | |
4822 | && TREE_CODE (new_decl) == FUNCTION_DECL); | |
4823 | DECL_POSSIBLY_INLINED (old_decl) = 1; | |
4824 | ||
4825 | old_version_node = cgraph_node (old_decl); | |
4826 | new_version_node = cgraph_node (new_decl); | |
4827 | ||
a3aadcc5 JH |
4828 | /* Output the inlining info for this abstract function, since it has been |
4829 | inlined. If we don't do this now, we can lose the information about the | |
4830 | variables in the function when the blocks get blown away as soon as we | |
4831 | remove the cgraph node. */ | |
4832 | (*debug_hooks->outlining_inline_function) (old_decl); | |
4833 | ||
19734dd8 RL |
4834 | DECL_ARTIFICIAL (new_decl) = 1; |
4835 | DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl); | |
f9417da1 | 4836 | DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl); |
19734dd8 | 4837 | |
3d283195 JH |
4838 | /* Prepare the data structures for the tree copy. */ |
4839 | memset (&id, 0, sizeof (id)); | |
4840 | ||
19734dd8 | 4841 | /* Generate a new name for the new version. */ |
9187e02d | 4842 | id.statements_to_fold = pointer_set_create (); |
b5b8b0ac | 4843 | |
6be42dd4 | 4844 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 4845 | id.debug_map = NULL; |
1b369fae RH |
4846 | id.src_fn = old_decl; |
4847 | id.dst_fn = new_decl; | |
4848 | id.src_node = old_version_node; | |
4849 | id.dst_node = new_version_node; | |
4850 | id.src_cfun = DECL_STRUCT_FUNCTION (old_decl); | |
19734dd8 | 4851 | |
1b369fae RH |
4852 | id.copy_decl = copy_decl_no_change; |
4853 | id.transform_call_graph_edges | |
4854 | = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE; | |
4855 | id.transform_new_cfg = true; | |
4856 | id.transform_return_to_modify = false; | |
9ff420f1 | 4857 | id.transform_lang_insert_block = NULL; |
1b369fae | 4858 | |
19734dd8 | 4859 | current_function_decl = new_decl; |
110cfe1c JH |
4860 | old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION |
4861 | (DECL_STRUCT_FUNCTION (old_decl)); | |
4862 | initialize_cfun (new_decl, old_decl, | |
4863 | old_entry_block->count, | |
4864 | old_entry_block->frequency); | |
4865 | push_cfun (DECL_STRUCT_FUNCTION (new_decl)); | |
19734dd8 RL |
4866 | |
4867 | /* Copy the function's static chain. */ | |
4868 | p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl; | |
4869 | if (p) | |
4870 | DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl = | |
4871 | copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl, | |
4872 | &id); | |
0f1961a2 | 4873 | |
19734dd8 RL |
4874 | /* If there's a tree_map, prepare for substitution. */ |
4875 | if (tree_map) | |
9187e02d | 4876 | for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++) |
19734dd8 | 4877 | { |
0f1961a2 | 4878 | gimple init; |
9187e02d | 4879 | replace_info = VEC_index (ipa_replace_map_p, tree_map, i); |
1b369fae | 4880 | if (replace_info->replace_p) |
00fc2333 | 4881 | { |
657c0925 JH |
4882 | tree op = replace_info->new_tree; |
4883 | ||
4884 | STRIP_NOPS (op); | |
4885 | ||
4886 | if (TREE_CODE (op) == VIEW_CONVERT_EXPR) | |
4887 | op = TREE_OPERAND (op, 0); | |
4888 | ||
4889 | if (TREE_CODE (op) == ADDR_EXPR) | |
00fc2333 | 4890 | { |
657c0925 | 4891 | op = TREE_OPERAND (op, 0); |
00fc2333 JH |
4892 | while (handled_component_p (op)) |
4893 | op = TREE_OPERAND (op, 0); | |
4894 | if (TREE_CODE (op) == VAR_DECL) | |
4895 | add_referenced_var (op); | |
4896 | } | |
0f1961a2 JH |
4897 | gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL); |
4898 | init = setup_one_parameter (&id, replace_info->old_tree, | |
4899 | replace_info->new_tree, id.src_fn, | |
4900 | NULL, | |
4901 | &vars); | |
4902 | if (init) | |
4903 | VEC_safe_push (gimple, heap, init_stmts, init); | |
00fc2333 | 4904 | } |
19734dd8 | 4905 | } |
eb50f5f4 JH |
4906 | /* Copy the function's arguments. */ |
4907 | if (DECL_ARGUMENTS (old_decl) != NULL_TREE) | |
4908 | DECL_ARGUMENTS (new_decl) = | |
4909 | copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id, | |
4910 | args_to_skip, &vars); | |
4911 | ||
4912 | DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id); | |
4913 | ||
4914 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ | |
4915 | number_blocks (id.dst_fn); | |
19734dd8 | 4916 | |
0f1961a2 | 4917 | declare_inline_vars (DECL_INITIAL (new_decl), vars); |
9187e02d | 4918 | |
cb91fab0 | 4919 | if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE) |
19734dd8 | 4920 | /* Add local vars. */ |
cb91fab0 | 4921 | for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls; |
19734dd8 RL |
4922 | t_step; t_step = TREE_CHAIN (t_step)) |
4923 | { | |
4924 | tree var = TREE_VALUE (t_step); | |
4925 | if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var)) | |
cb91fab0 | 4926 | cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls); |
526d73ab | 4927 | else if (!can_be_nonlocal (var, &id)) |
cb91fab0 | 4928 | cfun->local_decls = |
19734dd8 | 4929 | tree_cons (NULL_TREE, remap_decl (var, &id), |
cb91fab0 | 4930 | cfun->local_decls); |
19734dd8 RL |
4931 | } |
4932 | ||
4933 | /* Copy the Function's body. */ | |
27dbd3ac RH |
4934 | copy_body (&id, old_entry_block->count, old_entry_block->frequency, |
4935 | ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR); | |
19734dd8 | 4936 | |
19734dd8 RL |
4937 | if (DECL_RESULT (old_decl) != NULL_TREE) |
4938 | { | |
4939 | tree *res_decl = &DECL_RESULT (old_decl); | |
4940 | DECL_RESULT (new_decl) = remap_decl (*res_decl, &id); | |
4941 | lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl)); | |
4942 | } | |
4943 | ||
19734dd8 RL |
4944 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ |
4945 | number_blocks (new_decl); | |
4946 | ||
b5b8b0ac AO |
4947 | /* We want to create the BB unconditionally, so that the addition of |
4948 | debug stmts doesn't affect BB count, which may in the end cause | |
4949 | codegen differences. */ | |
4950 | bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR)); | |
4951 | while (VEC_length (gimple, init_stmts)) | |
4952 | insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts)); | |
08ad1d6d | 4953 | update_clone_info (&id); |
0f1961a2 | 4954 | |
27dbd3ac RH |
4955 | /* Remap the nonlocal_goto_save_area, if any. */ |
4956 | if (cfun->nonlocal_goto_save_area) | |
4957 | { | |
4958 | struct walk_stmt_info wi; | |
4959 | ||
4960 | memset (&wi, 0, sizeof (wi)); | |
4961 | wi.info = &id; | |
4962 | walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL); | |
4963 | } | |
4964 | ||
19734dd8 | 4965 | /* Clean up. */ |
6be42dd4 | 4966 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
4967 | if (id.debug_map) |
4968 | pointer_map_destroy (id.debug_map); | |
5006671f RG |
4969 | free_dominance_info (CDI_DOMINATORS); |
4970 | free_dominance_info (CDI_POST_DOMINATORS); | |
9187e02d JH |
4971 | |
4972 | fold_marked_statements (0, id.statements_to_fold); | |
4973 | pointer_set_destroy (id.statements_to_fold); | |
4974 | fold_cond_expr_cond (); | |
4975 | delete_unreachable_blocks_update_callgraph (&id); | |
4976 | update_ssa (TODO_update_ssa); | |
4977 | free_dominance_info (CDI_DOMINATORS); | |
4978 | free_dominance_info (CDI_POST_DOMINATORS); | |
4979 | ||
b5b8b0ac | 4980 | gcc_assert (!id.debug_stmts); |
0f1961a2 | 4981 | VEC_free (gimple, heap, init_stmts); |
110cfe1c | 4982 | pop_cfun (); |
873aa8f5 JH |
4983 | current_function_decl = old_current_function_decl; |
4984 | gcc_assert (!current_function_decl | |
4985 | || DECL_STRUCT_FUNCTION (current_function_decl) == cfun); | |
19734dd8 RL |
4986 | return; |
4987 | } | |
4988 | ||
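/* Illustrative only: a minimal sketch of a caller (the real callers
   live in the callgraph code).  Given NEW_DECL, a fresh FUNCTION_DECL
   cloned from OLD_DECL, duplicate the body with no replacements, no
   skipped arguments, and without touching existing clones:

     tree_function_versioning (old_decl, new_decl, NULL, false, NULL);  */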
f82a627c EB |
4989 | /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
4990 | the callee and return the inlined body on success. */ | |
4991 | ||
4992 | tree | |
4993 | maybe_inline_call_in_expr (tree exp) | |
4994 | { | |
4995 | tree fn = get_callee_fndecl (exp); | |
4996 | ||
4997 | /* We can only try to inline "const" functions. */ | |
4998 | if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn)) | |
4999 | { | |
5000 | struct pointer_map_t *decl_map = pointer_map_create (); | |
5001 | call_expr_arg_iterator iter; | |
5002 | copy_body_data id; | |
5003 | tree param, arg, t; | |
5004 | ||
5005 | /* Remap the parameters. */ | |
5006 | for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter); | |
5007 | param; | |
5008 | param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter)) | |
5009 | *pointer_map_insert (decl_map, param) = arg; | |
5010 | ||
5011 | memset (&id, 0, sizeof (id)); | |
5012 | id.src_fn = fn; | |
5013 | id.dst_fn = current_function_decl; | |
5014 | id.src_cfun = DECL_STRUCT_FUNCTION (fn); | |
5015 | id.decl_map = decl_map; | |
5016 | ||
5017 | id.copy_decl = copy_decl_no_change; | |
5018 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
5019 | id.transform_new_cfg = false; | |
5020 | id.transform_return_to_modify = true; | |
5021 | id.transform_lang_insert_block = NULL;
5022 | ||
5023 | /* Make sure not to unshare trees behind the front-end's back | |
5024 | since front-end specific mechanisms may rely on sharing. */ | |
5025 | id.regimplify = false; | |
5026 | id.do_not_unshare = true; | |
5027 | ||
5028 | /* We're not inside any EH region. */ | |
1d65f45c | 5029 | id.eh_lp_nr = 0; |
f82a627c EB |
5030 | |
5031 | t = copy_tree_body (&id); | |
5032 | pointer_map_destroy (decl_map); | |
5033 | ||
5034 | /* We can only return something suitable for use in a GENERIC | |
5035 | expression tree. */ | |
5036 | if (TREE_CODE (t) == MODIFY_EXPR) | |
5037 | return TREE_OPERAND (t, 1); | |
5038 | } | |
5039 | ||
5040 | return NULL_TREE; | |
5041 | } | |
5042 | ||
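/* Illustrative only: a hypothetical front-end use.  Given EXP, a
   CALL_EXPR to a "const" function whose GENERIC body was saved, try to
   replace the call with its inlined body:

     tree inlined = maybe_inline_call_in_expr (exp);
     if (inlined)
       exp = inlined;  */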
52dd234b RH |
5043 | /* Duplicate a type, fields and all. */ |
5044 | ||
5045 | tree | |
5046 | build_duplicate_type (tree type) | |
5047 | { | |
1b369fae | 5048 | struct copy_body_data id; |
52dd234b RH |
5049 | |
5050 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
5051 | id.src_fn = current_function_decl; |
5052 | id.dst_fn = current_function_decl; | |
5053 | id.src_cfun = cfun; | |
6be42dd4 | 5054 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 5055 | id.debug_map = NULL; |
4009f2e7 | 5056 | id.copy_decl = copy_decl_no_change; |
52dd234b RH |
5057 | |
5058 | type = remap_type_1 (type, &id); | |
5059 | ||
6be42dd4 | 5060 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
5061 | if (id.debug_map) |
5062 | pointer_map_destroy (id.debug_map); | |
52dd234b | 5063 | |
f31c9f09 DG |
5064 | TYPE_CANONICAL (type) = type; |
5065 | ||
52dd234b RH |
5066 | return type; |
5067 | } | |
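/* Illustrative only: a hypothetical use, creating a variant of a record
   type that can be modified independently of the original:

     tree copy = build_duplicate_type (orig_type);

   COPY gets fresh FIELD_DECLs and is its own TYPE_CANONICAL, so
   changing it cannot affect ORIG_TYPE.  */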
ab442df7 MM |
5068 | |
5069 | /* Return whether it is safe to inline a function: it is not safe if the
6eb29714 XDL |
5070 | callee uses different target specific options or if the call site's actual
5071 | argument types mismatch its parameter types. E is the call edge to be checked. */
ab442df7 | 5072 | bool |
6eb29714 | 5073 | tree_can_inline_p (struct cgraph_edge *e) |
ab442df7 | 5074 | { |
5779e713 MM |
5075 | #if 0 |
5076 | /* This causes a regression in SPEC in that it prevents a cold function from | |
5077 | inlining a hot function. Perhaps this should only apply to functions | |
5078 | that the user declares hot/cold/optimize explicitly. */ | |
5079 | ||
ab442df7 MM |
5080 | /* Don't inline a function with a higher optimization level than the |
5081 | caller, or with different space constraints (hot/cold functions). */ | |
5082 | tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller); | |
5083 | tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee); | |
5084 | ||
5085 | if (caller_tree != callee_tree) | |
5086 | { | |
5087 | struct cl_optimization *caller_opt | |
5088 | = TREE_OPTIMIZATION ((caller_tree) | |
5089 | ? caller_tree | |
5090 | : optimization_default_node); | |
5091 | ||
5092 | struct cl_optimization *callee_opt | |
5093 | = TREE_OPTIMIZATION ((callee_tree) | |
5094 | ? callee_tree | |
5095 | : optimization_default_node); | |
5096 | ||
5097 | if ((caller_opt->optimize > callee_opt->optimize) | |
5098 | || (caller_opt->optimize_size != callee_opt->optimize_size)) | |
5099 | return false; | |
5100 | } | |
5779e713 | 5101 | #endif |
6eb29714 XDL |
5102 | tree caller, callee; |
5103 | ||
5104 | caller = e->caller->decl; | |
5105 | callee = e->callee->decl; | |
ab442df7 | 5106 | |
f9417da1 RG |
5107 | /* We cannot inline a function that uses a different EH personality |
5108 | than the caller. */ | |
5109 | if (DECL_FUNCTION_PERSONALITY (caller) | |
5110 | && DECL_FUNCTION_PERSONALITY (callee) | |
5111 | && (DECL_FUNCTION_PERSONALITY (caller) | |
5112 | != DECL_FUNCTION_PERSONALITY (callee))) | |
5113 | { | |
5114 | e->inline_failed = CIF_UNSPECIFIED; | |
5115 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
5116 | return false; | |
5117 | } | |
5118 | ||
ab442df7 | 5119 | /* Allow the backend to decide if inlining is ok. */ |
6eb29714 XDL |
5120 | if (!targetm.target_option.can_inline_p (caller, callee)) |
5121 | { | |
5122 | e->inline_failed = CIF_TARGET_OPTION_MISMATCH; | |
5123 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
d7f09764 | 5124 | e->call_stmt_cannot_inline_p = true; |
6eb29714 XDL |
5125 | return false; |
5126 | } | |
5127 | ||
d7f09764 DN |
5128 | if (e->call_stmt |
5129 | && !gimple_check_call_args (e->call_stmt)) | |
6eb29714 XDL |
5130 | { |
5131 | e->inline_failed = CIF_MISMATCHED_ARGUMENTS; | |
5132 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
d7f09764 | 5133 | e->call_stmt_cannot_inline_p = true; |
6eb29714 XDL |
5134 | return false; |
5135 | } | |
5136 | ||
5137 | return true; | |
ab442df7 | 5138 | } |