/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

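/* For instance, inlining "int inc (int a) { return a + 1; }" into
   "y = inc (x);" conceptually produces

     a.1 = x;
     retval.2 = a.1 + 1;
     y = retval.2;

   where the PARM_DECL a has been remapped to the local VAR_DECL a.1
   and the RETURN_EXPR has become a MODIFY_EXPR storing into the
   returned-value variable retval.2.  The names a.1 and retval.2 are
   only illustrative.  */
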
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

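/* For example, when a PARM_DECL p is remapped to a new VAR_DECL p.3,
   the calls above record both p -> p.3 and p.3 -> p.3, so a later walk
   that encounters p.3 maps it to itself instead of duplicating it
   again.  (The name p.3 is only illustrative.)  */
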
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing a RESULT_DECL by a variable during
     inlining: this saves us from the need to introduce a PHI node in the
     case where the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function with an uninitialized variable, we might
             extend its lifetime (the variable might get reused).  This causes
             an ICE in the case where we end up extending the lifetime of an
             SSA name across an abnormal edge, and it also increases register
             pressure.

             We simply initialize all uninitialized vars to 0, except in the
             case where we are inlining into the very first BB.  We could
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

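/* To illustrate the uninitialized-variable handling above: if the
   inlined body contains a use of a local that is never assigned, its
   SSA name is defined by a GIMPLE_NOP.  Unless we are inlining into
   the very first BB, the copy instead receives an explicit "var = 0"
   in the entry block, so its lifetime cannot later be extended across
   an abnormal edge by reuse of the variable.  */
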
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

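/* Helper for remap_type.  TYPE is known to need a new copy; build it,
   register it in ID's decl map, and recursively remap its components.  */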
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

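/* Remap TYPE in the context described by ID and return the remapped
   type; types that are not variably modified are mapped to themselves.  */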
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

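/* As an example of a variably modified type: in

     void f (int n) { int a[n]; ... }

   the domain of A's array type refers to the PARM_DECL n, so copying
   the body must remap the type; an ordinary type like "int" is mapped
   to itself by remap_type above.  */
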
/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

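/* Remap the chain of declarations DECLS using ID and return the new
   chain.  Declarations that are instead nonlocalized are pushed onto
   NONLOCALIZED_LIST when it is non-NULL.  */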
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

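/* Replace the STATEMENT_LIST at *TP with a fresh deep copy, recursing
   into nested STATEMENT_LISTs.  */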
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

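/* Copy the BIND_EXPR at *TP, remapping its block and the variables it
   declares (including their DECL_VALUE_EXPRs) using ID.  */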
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    {
      tree t;

      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);

      /* Also copy value-expressions.  */
      for (t = BIND_EXPR_VARS (*tp); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == VAR_DECL
            && DECL_HAS_VALUE_EXPR_P (t))
          {
            tree tem = DECL_VALUE_EXPR (t);
            walk_tree (&tem, copy_tree_body_r, id, NULL);
            SET_DECL_VALUE_EXPR (t, tem);
          }
    }
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

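/* A concrete case of the INDIRECT_REF handling above: when inlining
   "void f (int *p) { *p = 0; }" at the call "f (&x)", the parameter p
   is mapped to &x, so the dereference *p folds to plain x rather than
   leaving *&x in the IL.  */
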
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}

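/* For example, if EH region 2 of the source function was duplicated
   as region 5 of the destination, a copied "__builtin_eh_pointer (2)"
   must become "__builtin_eh_pointer (5)"; remap_gimple_stmt below uses
   these helpers to renumber such references.  (The region numbers here
   are only illustrative.)  */
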
1193 | |
1194 | /* Helper for copy_bb. Remap statement STMT using the inlining | |
1195 | information in ID. Return the new statement copy. */ | |
1196 | ||
1197 | static gimple | |
1198 | remap_gimple_stmt (gimple stmt, copy_body_data *id) | |
1199 | { | |
1200 | gimple copy = NULL; | |
1201 | struct walk_stmt_info wi; | |
1202 | tree new_block; | |
5a6e26b7 | 1203 | bool skip_first = false; |
726a989a RB |
1204 | |
1205 | /* Begin by recognizing trees that we'll completely rewrite for the | |
1206 | inlining context. Our output for these trees is completely | |
1207 | different from out input (e.g. RETURN_EXPR is deleted, and morphs | |
1208 | into an edge). Further down, we'll handle trees that get | |
1209 | duplicated and/or tweaked. */ | |
1210 | ||
1211 | /* When requested, GIMPLE_RETURNs should be transformed to just the | |
1212 | contained GIMPLE_ASSIGN. The branch semantics of the return will | |
1213 | be handled elsewhere by manipulating the CFG rather than the | |
1214 | statement. */ | |
1215 | if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify) | |
1216 | { | |
1217 | tree retval = gimple_return_retval (stmt); | |
1218 | ||
1219 | /* If we're returning something, just turn that into an | |
1220 | assignment into the equivalent of the original RESULT_DECL. | |
1221 | If RETVAL is just the result decl, the result decl has | |
1222 | already been set (e.g. a recent "foo (&result_decl, ...)"); | |
1223 | just toss the entire GIMPLE_RETURN. */ | |
1224 | if (retval && TREE_CODE (retval) != RESULT_DECL) | |
5a6e26b7 JH |
1225 | { |
1226 | copy = gimple_build_assign (id->retvar, retval); | |
1227 | /* id->retvar is already substituted. Skip it on later remapping. */ | |
1228 | skip_first = true; | |
1229 | } | |
726a989a RB |
1230 | else |
1231 | return gimple_build_nop (); | |
1232 | } | |
1233 | else if (gimple_has_substatements (stmt)) | |
1234 | { | |
1235 | gimple_seq s1, s2; | |
1236 | ||
1237 | /* When cloning bodies from the C++ front end, we will be handed bodies | |
1238 | in High GIMPLE form. Handle here all the High GIMPLE statements that | |
1239 | have embedded statements. */ | |
1240 | switch (gimple_code (stmt)) | |
1241 | { | |
1242 | case GIMPLE_BIND: | |
1243 | copy = copy_gimple_bind (stmt, id); | |
1244 | break; | |
1245 | ||
1246 | case GIMPLE_CATCH: | |
1247 | s1 = remap_gimple_seq (gimple_catch_handler (stmt), id); | |
1248 | copy = gimple_build_catch (gimple_catch_types (stmt), s1); | |
1249 | break; | |
1250 | ||
1251 | case GIMPLE_EH_FILTER: | |
1252 | s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id); | |
1253 | copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1); | |
1254 | break; | |
1255 | ||
1256 | case GIMPLE_TRY: | |
1257 | s1 = remap_gimple_seq (gimple_try_eval (stmt), id); | |
1258 | s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id); | |
b8698a0f | 1259 | copy = gimple_build_try (s1, s2, gimple_try_kind (stmt)); |
726a989a RB |
1260 | break; |
1261 | ||
1262 | case GIMPLE_WITH_CLEANUP_EXPR: | |
1263 | s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id); | |
1264 | copy = gimple_build_wce (s1); | |
1265 | break; | |
1266 | ||
1267 | case GIMPLE_OMP_PARALLEL: | |
1268 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1269 | copy = gimple_build_omp_parallel | |
1270 | (s1, | |
1271 | gimple_omp_parallel_clauses (stmt), | |
1272 | gimple_omp_parallel_child_fn (stmt), | |
1273 | gimple_omp_parallel_data_arg (stmt)); | |
1274 | break; | |
1275 | ||
1276 | case GIMPLE_OMP_TASK: | |
1277 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1278 | copy = gimple_build_omp_task | |
1279 | (s1, | |
1280 | gimple_omp_task_clauses (stmt), | |
1281 | gimple_omp_task_child_fn (stmt), | |
1282 | gimple_omp_task_data_arg (stmt), | |
1283 | gimple_omp_task_copy_fn (stmt), | |
1284 | gimple_omp_task_arg_size (stmt), | |
1285 | gimple_omp_task_arg_align (stmt)); | |
1286 | break; | |
1287 | ||
1288 | case GIMPLE_OMP_FOR: | |
1289 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1290 | s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id); | |
1291 | copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt), | |
1292 | gimple_omp_for_collapse (stmt), s2); | |
1293 | { | |
1294 | size_t i; | |
1295 | for (i = 0; i < gimple_omp_for_collapse (stmt); i++) | |
1296 | { | |
1297 | gimple_omp_for_set_index (copy, i, | |
1298 | gimple_omp_for_index (stmt, i)); | |
1299 | gimple_omp_for_set_initial (copy, i, | |
1300 | gimple_omp_for_initial (stmt, i)); | |
1301 | gimple_omp_for_set_final (copy, i, | |
1302 | gimple_omp_for_final (stmt, i)); | |
1303 | gimple_omp_for_set_incr (copy, i, | |
1304 | gimple_omp_for_incr (stmt, i)); | |
1305 | gimple_omp_for_set_cond (copy, i, | |
1306 | gimple_omp_for_cond (stmt, i)); | |
1307 | } | |
1308 | } | |
1309 | break; | |
1310 | ||
1311 | case GIMPLE_OMP_MASTER: | |
1312 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1313 | copy = gimple_build_omp_master (s1); | |
1314 | break; | |
1315 | ||
1316 | case GIMPLE_OMP_ORDERED: | |
1317 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1318 | copy = gimple_build_omp_ordered (s1); | |
1319 | break; | |
1320 | ||
1321 | case GIMPLE_OMP_SECTION: | |
1322 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1323 | copy = gimple_build_omp_section (s1); | |
1324 | break; | |
1325 | ||
1326 | case GIMPLE_OMP_SECTIONS: | |
1327 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1328 | copy = gimple_build_omp_sections | |
1329 | (s1, gimple_omp_sections_clauses (stmt)); | |
1330 | break; | |
1331 | ||
1332 | case GIMPLE_OMP_SINGLE: | |
1333 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1334 | copy = gimple_build_omp_single | |
1335 | (s1, gimple_omp_single_clauses (stmt)); | |
1336 | break; | |
1337 | ||
05a26161 JJ |
1338 | case GIMPLE_OMP_CRITICAL: |
1339 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1340 | copy | |
1341 | = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt)); | |
1342 | break; | |
1343 | ||
726a989a RB |
1344 | default: |
1345 | gcc_unreachable (); | |
1346 | } | |
1347 | } | |
1348 | else | |
1349 | { | |
1350 | if (gimple_assign_copy_p (stmt) | |
1351 | && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt) | |
1352 | && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn)) | |
1353 | { | |
1354 | /* Here we handle statements that are not completely rewritten. | |
1355 | First we detect some inlining-induced bogosities for | |
1356 | discarding. */ | |
1357 | ||
1358 | /* Some assignments VAR = VAR; don't generate any rtl code | |
1359 | and thus don't count as variable modification. Avoid | |
1360 | keeping bogosities like 0 = 0. */ | |
1361 | tree decl = gimple_assign_lhs (stmt), value; | |
1362 | tree *n; | |
1363 | ||
1364 | n = (tree *) pointer_map_contains (id->decl_map, decl); | |
1365 | if (n) | |
1366 | { | |
1367 | value = *n; | |
1368 | STRIP_TYPE_NOPS (value); | |
1369 | if (TREE_CONSTANT (value) || TREE_READONLY (value)) | |
1370 | return gimple_build_nop (); | |
1371 | } | |
1372 | } | |
1373 | ||
b5b8b0ac AO |
1374 | if (gimple_debug_bind_p (stmt)) |
1375 | { | |
1376 | copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt), | |
1377 | gimple_debug_bind_get_value (stmt), | |
1378 | stmt); | |
1379 | VEC_safe_push (gimple, heap, id->debug_stmts, copy); | |
1380 | return copy; | |
1381 | } | |
1d65f45c RH |
1382 | |
1383 | /* Create a new deep copy of the statement. */ | |
1384 | copy = gimple_copy (stmt); | |
1385 | ||
1386 | /* Remap the region numbers for __builtin_eh_{pointer,filter}, | |
1387 | RESX and EH_DISPATCH. */ | |
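/* For example (a hedged sketch): a copied "resx 2" that refers to | |
EH region 2 of the callee must be renumbered to whatever region | |
number the duplicated region received in the caller's EH tree. */ | |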
1388 | if (id->eh_map) | |
1389 | switch (gimple_code (copy)) | |
1390 | { | |
1391 | case GIMPLE_CALL: | |
1392 | { | |
1393 | tree r, fndecl = gimple_call_fndecl (copy); | |
1394 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
1395 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1396 | { | |
1397 | case BUILT_IN_EH_COPY_VALUES: | |
1398 | r = gimple_call_arg (copy, 1); | |
1399 | r = remap_eh_region_tree_nr (r, id); | |
1400 | gimple_call_set_arg (copy, 1, r); | |
1401 | /* FALLTHRU */ | |
1402 | ||
1403 | case BUILT_IN_EH_POINTER: | |
1404 | case BUILT_IN_EH_FILTER: | |
1405 | r = gimple_call_arg (copy, 0); | |
1406 | r = remap_eh_region_tree_nr (r, id); | |
1407 | gimple_call_set_arg (copy, 0, r); | |
1408 | break; | |
1409 | ||
1410 | default: | |
1411 | break; | |
1412 | } | |
d086d311 | 1413 | |
25a6a873 RG |
1414 | /* Reset alias info if we didn't apply measures to |
1415 | keep it valid over inlining by setting DECL_PT_UID. */ | |
1416 | if (!id->src_cfun->gimple_df | |
1417 | || !id->src_cfun->gimple_df->ipa_pta) | |
1418 | gimple_call_reset_alias_info (copy); | |
1d65f45c RH |
1419 | } |
1420 | break; | |
1421 | ||
1422 | case GIMPLE_RESX: | |
1423 | { | |
1424 | int r = gimple_resx_region (copy); | |
1425 | r = remap_eh_region_nr (r, id); | |
1426 | gimple_resx_set_region (copy, r); | |
1427 | } | |
1428 | break; | |
1429 | ||
1430 | case GIMPLE_EH_DISPATCH: | |
1431 | { | |
1432 | int r = gimple_eh_dispatch_region (copy); | |
1433 | r = remap_eh_region_nr (r, id); | |
1434 | gimple_eh_dispatch_set_region (copy, r); | |
1435 | } | |
1436 | break; | |
1437 | ||
1438 | default: | |
1439 | break; | |
1440 | } | |
726a989a RB |
1441 | } |
1442 | ||
1443 | /* If STMT has a block defined, map it to the newly constructed | |
1444 | block. When inlining we want statements without a block to | |
1445 | appear in the block of the function call. */ | |
1446 | new_block = id->block; | |
1447 | if (gimple_block (copy)) | |
1448 | { | |
1449 | tree *n; | |
1450 | n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy)); | |
1451 | gcc_assert (n); | |
1452 | new_block = *n; | |
1453 | } | |
1454 | ||
1455 | gimple_set_block (copy, new_block); | |
1456 | ||
b5b8b0ac AO |
1457 | if (gimple_debug_bind_p (copy)) |
1458 | return copy; | |
1459 | ||
726a989a RB |
1460 | /* Remap all the operands in COPY. */ |
1461 | memset (&wi, 0, sizeof (wi)); | |
1462 | wi.info = id; | |
5a6e26b7 JH |
1463 | if (skip_first) |
1464 | walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL); | |
1465 | else | |
b8698a0f | 1466 | walk_gimple_op (copy, remap_gimple_op_r, &wi); |
726a989a | 1467 | |
5006671f RG |
1468 | /* Clear the copied virtual operands. We are not remapping them here |
1469 | but are going to recreate them from scratch. */ | |
1470 | if (gimple_has_mem_ops (copy)) | |
1471 | { | |
1472 | gimple_set_vdef (copy, NULL_TREE); | |
1473 | gimple_set_vuse (copy, NULL_TREE); | |
1474 | } | |
1475 | ||
726a989a RB |
1476 | return copy; |
1477 | } | |
1478 | ||
1479 | ||
e21aff8a SB |
1480 | /* Copy basic block, scale profile accordingly. Edges will be taken care of |
1481 | later. */ | |
1482 | ||
1483 | static basic_block | |
0178d644 VR |
1484 | copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, |
1485 | gcov_type count_scale) | |
e21aff8a | 1486 | { |
c2a4718a | 1487 | gimple_stmt_iterator gsi, copy_gsi, seq_gsi; |
e21aff8a | 1488 | basic_block copy_basic_block; |
726a989a | 1489 | tree decl; |
0d63a740 | 1490 | gcov_type freq; |
e21aff8a SB |
1491 | |
1492 | /* create_basic_block() will append every new block to | |
1493 | basic_block_info automatically. */ | |
cceb1885 GDR |
1494 | copy_basic_block = create_basic_block (NULL, (void *) 0, |
1495 | (basic_block) bb->prev_bb->aux); | |
e21aff8a | 1496 | copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE; |
45a80bb9 | 1497 | |
726a989a RB |
1498 | /* We are going to rebuild frequencies from scratch. These values |
1499 | have only minor importance for driving canonicalize_loop_headers. */ | |
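/* A small worked example (illustrative): with REG_BR_PROB_BASE of | |
10000, a block with frequency 5000 copied at a call site whose | |
frequency_scale is 2000 receives 5000 * 2000 / 10000 == 1000. */ | |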
0d63a740 | 1500 | freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE); |
726a989a | 1501 | |
0d63a740 JH |
1502 | /* We recompute frequencies after inlining, so this is quite safe. */ |
1503 | if (freq > BB_FREQ_MAX) | |
1504 | freq = BB_FREQ_MAX; | |
1505 | copy_basic_block->frequency = freq; | |
e21aff8a | 1506 | |
726a989a RB |
1507 | copy_gsi = gsi_start_bb (copy_basic_block); |
1508 | ||
1509 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
e21aff8a | 1510 | { |
726a989a RB |
1511 | gimple stmt = gsi_stmt (gsi); |
1512 | gimple orig_stmt = stmt; | |
e21aff8a | 1513 | |
416c991f | 1514 | id->regimplify = false; |
726a989a RB |
1515 | stmt = remap_gimple_stmt (stmt, id); |
1516 | if (gimple_nop_p (stmt)) | |
1517 | continue; | |
1518 | ||
1519 | gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt); | |
c2a4718a | 1520 | seq_gsi = copy_gsi; |
726a989a RB |
1521 | |
1522 | /* With return slot optimization we can end up with | |
1523 | non-gimple (foo *)&this->m, fix that here. */ | |
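/* Roughly (illustrative), the offending copy | |
lhs = (struct foo *) &this->m; | |
is rewritten below into | |
tmp = &this->m; | |
lhs = (struct foo *) tmp; | |
so that the rhs operand is a valid gimple value again. */ | |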
4c29307d JJ |
1524 | if (is_gimple_assign (stmt) |
1525 | && gimple_assign_rhs_code (stmt) == NOP_EXPR | |
1526 | && !is_gimple_val (gimple_assign_rhs1 (stmt))) | |
e21aff8a | 1527 | { |
726a989a | 1528 | tree new_rhs; |
c2a4718a | 1529 | new_rhs = force_gimple_operand_gsi (&seq_gsi, |
4a2b7f24 JJ |
1530 | gimple_assign_rhs1 (stmt), |
1531 | true, NULL, false, GSI_NEW_STMT); | |
726a989a | 1532 | gimple_assign_set_rhs1 (stmt, new_rhs); |
c2a4718a | 1533 | id->regimplify = false; |
726a989a | 1534 | } |
2b65dae5 | 1535 | |
c2a4718a JJ |
1536 | gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT); |
1537 | ||
1538 | if (id->regimplify) | |
1539 | gimple_regimplify_operands (stmt, &seq_gsi); | |
1540 | ||
1541 | /* If copy_basic_block was empty at the start of this iteration, | |
1542 | call gsi_start_bb again to get at the newly added statements. */ | |
1543 | if (gsi_end_p (copy_gsi)) | |
1544 | copy_gsi = gsi_start_bb (copy_basic_block); | |
1545 | else | |
1546 | gsi_next (&copy_gsi); | |
110cfe1c | 1547 | |
726a989a RB |
1548 | /* Process the new statement. The call to gimple_regimplify_operands |
1549 | possibly turned the statement into multiple statements; we | |
1550 | need to process all of them. */ | |
c2a4718a | 1551 | do |
726a989a | 1552 | { |
9187e02d JH |
1553 | tree fn; |
1554 | ||
c2a4718a | 1555 | stmt = gsi_stmt (copy_gsi); |
726a989a RB |
1556 | if (is_gimple_call (stmt) |
1557 | && gimple_call_va_arg_pack_p (stmt) | |
1558 | && id->gimple_call) | |
1559 | { | |
1560 | /* __builtin_va_arg_pack () should be replaced by | |
1561 | all arguments corresponding to ... in the caller. */ | |
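/* A hedged sketch of the intended effect: given | |
static inline int f (int x, ...) | |
{ return g (x, __builtin_va_arg_pack ()); } | |
inlining the call f (1, 2, 3) rewrites the inner call so that | |
the anonymous arguments 2 and 3 of the caller replace the pack, | |
conceptually yielding g (x, 2, 3). */ | |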
1562 | tree p; | |
1563 | gimple new_call; | |
1564 | VEC(tree, heap) *argarray; | |
1565 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1566 | size_t n; | |
1567 | ||
1568 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p)) | |
1569 | nargs--; | |
1570 | ||
1571 | /* Create the new array of arguments. */ | |
1572 | n = nargs + gimple_call_num_args (stmt); | |
1573 | argarray = VEC_alloc (tree, heap, n); | |
1574 | VEC_safe_grow (tree, heap, argarray, n); | |
1575 | ||
1576 | /* Copy all the arguments before '...' */ | |
1577 | memcpy (VEC_address (tree, argarray), | |
1578 | gimple_call_arg_ptr (stmt, 0), | |
1579 | gimple_call_num_args (stmt) * sizeof (tree)); | |
1580 | ||
1581 | /* Append the arguments passed in '...' */ | |
1582 | memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt), | |
1583 | gimple_call_arg_ptr (id->gimple_call, 0) | |
1584 | + (gimple_call_num_args (id->gimple_call) - nargs), | |
1585 | nargs * sizeof (tree)); | |
1586 | ||
1587 | new_call = gimple_build_call_vec (gimple_call_fn (stmt), | |
1588 | argarray); | |
1589 | ||
1590 | VEC_free (tree, heap, argarray); | |
1591 | ||
1592 | /* Copy all GIMPLE_CALL flags, location and block, except | |
1593 | GF_CALL_VA_ARG_PACK. */ | |
1594 | gimple_call_copy_flags (new_call, stmt); | |
1595 | gimple_call_set_va_arg_pack (new_call, false); | |
1596 | gimple_set_location (new_call, gimple_location (stmt)); | |
1597 | gimple_set_block (new_call, gimple_block (stmt)); | |
1598 | gimple_call_set_lhs (new_call, gimple_call_lhs (stmt)); | |
1599 | ||
1600 | gsi_replace (&copy_gsi, new_call, false); | |
9cfa22be | 1601 | gimple_set_bb (stmt, NULL); |
726a989a RB |
1602 | stmt = new_call; |
1603 | } | |
1604 | else if (is_gimple_call (stmt) | |
1605 | && id->gimple_call | |
1606 | && (decl = gimple_call_fndecl (stmt)) | |
1607 | && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL | |
1608 | && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN) | |
e0704a46 | 1609 | { |
726a989a RB |
1610 | /* __builtin_va_arg_pack_len () should be replaced by |
1611 | the number of anonymous arguments. */ | |
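/* Continuing the sketch above: for the call f (1, 2, 3) with one | |
named parameter, __builtin_va_arg_pack_len () would simply be | |
replaced by the constant 2. */ | |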
1612 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1613 | tree count, p; | |
1614 | gimple new_stmt; | |
1615 | ||
1616 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p)) | |
1617 | nargs--; | |
1618 | ||
1619 | count = build_int_cst (integer_type_node, nargs); | |
1620 | new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count); | |
1621 | gsi_replace (&copy_gsi, new_stmt, false); | |
1622 | stmt = new_stmt; | |
1623 | } | |
b8a00a4d | 1624 | |
726a989a RB |
1625 | /* Statements produced by inlining can be unfolded, especially | |
1626 | when we have constant propagated some operands. We can't fold | |
1627 | them right now for two reasons: | |
1628 | 1) folding requires SSA_NAME_DEF_STMTs to be correct | |
1629 | 2) we can't change function calls to builtins. | |
1630 | So we just mark the statement for later folding. We mark | |
1631 | all new statements, instead of just the statements that changed | |
1632 | through some nontrivial substitution, so that even statements made | |
1633 | foldable indirectly are updated. If this turns out to be | |
1634 | expensive, copy_body can be told to watch for nontrivial | |
1635 | changes. */ | |
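/* E.g. (illustrative): after the constant 4 is substituted for a | |
parameter, a copied "tmp = x * 2" reads "tmp = 4 * 2"; the | |
deferred fold later reduces it to "tmp = 8". */ | |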
1636 | if (id->statements_to_fold) | |
1637 | pointer_set_insert (id->statements_to_fold, stmt); | |
1638 | ||
1639 | /* We're duplicating a CALL_EXPR. Find any corresponding | |
1640 | callgraph edges and update or duplicate them. */ | |
1641 | if (is_gimple_call (stmt)) | |
1642 | { | |
9b2a5ef7 | 1643 | struct cgraph_edge *edge; |
f618d33e | 1644 | int flags; |
6ef5231b | 1645 | |
726a989a | 1646 | switch (id->transform_call_graph_edges) |
e0704a46 | 1647 | { |
9b2a5ef7 RH |
1648 | case CB_CGE_DUPLICATE: |
1649 | edge = cgraph_edge (id->src_node, orig_stmt); | |
1650 | if (edge) | |
0d63a740 JH |
1651 | { |
1652 | int edge_freq = edge->frequency; | |
1653 | edge = cgraph_clone_edge (edge, id->dst_node, stmt, | |
1654 | gimple_uid (stmt), | |
1655 | REG_BR_PROB_BASE, CGRAPH_FREQ_BASE, | |
1656 | edge->frequency, true); | |
1657 | /* We could also just rescale the frequency, but | |
1658 | doing so would introduce roundoff errors and make | |
1659 | the verifier unhappy. */ | |
b8698a0f | 1660 | edge->frequency |
0d63a740 JH |
1661 | = compute_call_stmt_bb_frequency (id->dst_node->decl, |
1662 | copy_basic_block); | |
1663 | if (dump_file | |
1664 | && profile_status_for_function (cfun) != PROFILE_ABSENT | |
1665 | && (edge_freq > edge->frequency + 10 | |
1666 | || edge_freq < edge->frequency - 10)) | |
1667 | { | |
1668 | fprintf (dump_file, "Edge frequency estimated by " | |
1669 | "cgraph %i diverge from inliner's estimate %i\n", | |
1670 | edge_freq, | |
1671 | edge->frequency); | |
1672 | fprintf (dump_file, | |
1673 | "Orig bb: %i, orig bb freq %i, new bb freq %i\n", | |
1674 | bb->index, | |
1675 | bb->frequency, | |
1676 | copy_basic_block->frequency); | |
1677 | } | |
8132a837 | 1678 | stmt = cgraph_redirect_edge_call_stmt_to_callee (edge); |
0d63a740 | 1679 | } |
9b2a5ef7 RH |
1680 | break; |
1681 | ||
1682 | case CB_CGE_MOVE_CLONES: | |
1683 | cgraph_set_call_stmt_including_clones (id->dst_node, | |
1684 | orig_stmt, stmt); | |
1685 | edge = cgraph_edge (id->dst_node, stmt); | |
1686 | break; | |
1687 | ||
1688 | case CB_CGE_MOVE: | |
1689 | edge = cgraph_edge (id->dst_node, orig_stmt); | |
1690 | if (edge) | |
1691 | cgraph_set_call_stmt (edge, stmt); | |
1692 | break; | |
1693 | ||
1694 | default: | |
1695 | gcc_unreachable (); | |
110cfe1c | 1696 | } |
f618d33e | 1697 | |
9b2a5ef7 RH |
1698 | /* Constant propagation on arguments done during inlining | |
1699 | may create a new direct call. Produce an edge for it. */ | |
b8698a0f | 1700 | if ((!edge |
e33c6cd6 | 1701 | || (edge->indirect_inlining_edge |
9b2a5ef7 | 1702 | && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)) |
9b2a5ef7 RH |
1703 | && (fn = gimple_call_fndecl (stmt)) != NULL) |
1704 | { | |
1705 | struct cgraph_node *dest = cgraph_node (fn); | |
1706 | ||
1707 | /* We have a missing edge in the callgraph. This can happen | |
1708 | when previous inlining turned an indirect call into a | |
0e3776db JH |
1709 | direct call by constant propagating arguments or we are |
1710 | producing a dead clone (for further cloning). In all | |
9b2a5ef7 RH |
1711 | other cases we hit a bug (incorrect node sharing is the |
1712 | most common reason for missing edges). */ | |
0e3776db JH |
1713 | gcc_assert (dest->needed || !dest->analyzed |
1714 | || !id->src_node->analyzed); | |
9b2a5ef7 RH |
1715 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES) |
1716 | cgraph_create_edge_including_clones | |
47cb0d7d | 1717 | (id->dst_node, dest, orig_stmt, stmt, bb->count, |
b8698a0f | 1718 | compute_call_stmt_bb_frequency (id->dst_node->decl, |
0d63a740 | 1719 | copy_basic_block), |
9b2a5ef7 RH |
1720 | bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL); |
1721 | else | |
1722 | cgraph_create_edge (id->dst_node, dest, stmt, | |
47cb0d7d JH |
1723 | bb->count, |
1724 | compute_call_stmt_bb_frequency | |
1725 | (id->dst_node->decl, copy_basic_block), | |
9b2a5ef7 RH |
1726 | bb->loop_depth)->inline_failed |
1727 | = CIF_ORIGINALLY_INDIRECT_CALL; | |
1728 | if (dump_file) | |
1729 | { | |
1730 | fprintf (dump_file, "Created new direct edge to %s", | |
1731 | cgraph_node_name (dest)); | |
1732 | } | |
1733 | } | |
9187e02d | 1734 | |
f618d33e | 1735 | flags = gimple_call_flags (stmt); |
f618d33e MJ |
1736 | if (flags & ECF_MAY_BE_ALLOCA) |
1737 | cfun->calls_alloca = true; | |
1738 | if (flags & ECF_RETURNS_TWICE) | |
1739 | cfun->calls_setjmp = true; | |
726a989a | 1740 | } |
e21aff8a | 1741 | |
1d65f45c RH |
1742 | maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt, |
1743 | id->eh_map, id->eh_lp_nr); | |
726a989a | 1744 | |
b5b8b0ac | 1745 | if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt)) |
726a989a RB |
1746 | { |
1747 | ssa_op_iter i; | |
1748 | tree def; | |
1749 | ||
1750 | find_new_referenced_vars (gsi_stmt (copy_gsi)); | |
1751 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF) | |
1752 | if (TREE_CODE (def) == SSA_NAME) | |
1753 | SSA_NAME_DEF_STMT (def) = stmt; | |
1754 | } | |
1755 | ||
1756 | gsi_next (&copy_gsi); | |
e21aff8a | 1757 | } |
c2a4718a | 1758 | while (!gsi_end_p (copy_gsi)); |
726a989a RB |
1759 | |
1760 | copy_gsi = gsi_last_bb (copy_basic_block); | |
e21aff8a | 1761 | } |
726a989a | 1762 | |
e21aff8a SB |
1763 | return copy_basic_block; |
1764 | } | |
1765 | ||
110cfe1c JH |
1766 | /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA | |
1767 | form is quite easy, since the dominator relationship for the old basic blocks | |
1768 | does not change. | |
1769 | ||
1770 | There is, however, an exception: inlining might change the dominator relation | |
1771 | across EH edges leading from basic blocks within the inlined function | |
5305a4cb | 1772 | to landing pads in the function we inline into. | |
110cfe1c | 1773 | |
e9705dc5 AO |
1774 | The function fills in PHI_RESULTs of such PHI nodes if they refer |
1775 | to gimple regs. Otherwise, the function marks the PHI_RESULT of such | |
1776 | PHI nodes for renaming. For non-gimple regs, renaming is safe: the | |
1777 | EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be | |
1778 | set, and this means that there will be no overlapping live ranges | |
110cfe1c JH |
1779 | for the underlying symbol. |
1780 | ||
1781 | This might change in the future if we allow redirecting of EH edges; | |
1782 | we might then want to change the way we build the CFG pre-inlining to include | |
1783 | all the possible edges. */ | |
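/* A sketch of the handled situation (illustrative): a statement | |
copied from the inlined body can throw to a landing pad in the | |
caller whose PHI nodes already carry an argument on the edge from | |
the original call's block (RET_BB); the new abnormal edge needs a | |
matching PHI argument, which is copied from that edge below. */ | |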
1784 | static void | |
e9705dc5 AO |
1785 | update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb, |
1786 | bool can_throw, bool nonlocal_goto) | |
110cfe1c JH |
1787 | { |
1788 | edge e; | |
1789 | edge_iterator ei; | |
1790 | ||
1791 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1792 | if (!e->dest->aux | |
1793 | || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK) | |
1794 | { | |
726a989a RB |
1795 | gimple phi; |
1796 | gimple_stmt_iterator si; | |
110cfe1c | 1797 | |
e9705dc5 AO |
1798 | if (!nonlocal_goto) |
1799 | gcc_assert (e->flags & EDGE_EH); | |
726a989a | 1800 | |
e9705dc5 AO |
1801 | if (!can_throw) |
1802 | gcc_assert (!(e->flags & EDGE_EH)); | |
726a989a RB |
1803 | |
1804 | for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si)) | |
110cfe1c | 1805 | { |
e9705dc5 AO |
1806 | edge re; |
1807 | ||
726a989a RB |
1808 | phi = gsi_stmt (si); |
1809 | ||
e9705dc5 AO |
1810 | /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */ |
1811 | gcc_assert (!e->dest->aux); | |
1812 | ||
496a4ef5 JH |
1813 | gcc_assert ((e->flags & EDGE_EH) |
1814 | || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi))); | |
e9705dc5 AO |
1815 | |
1816 | if (!is_gimple_reg (PHI_RESULT (phi))) | |
1817 | { | |
726a989a | 1818 | mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi))); |
e9705dc5 AO |
1819 | continue; |
1820 | } | |
1821 | ||
1822 | re = find_edge (ret_bb, e->dest); | |
1432b19f | 1823 | gcc_assert (re); |
e9705dc5 AO |
1824 | gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL)) |
1825 | == (e->flags & (EDGE_EH | EDGE_ABNORMAL))); | |
1826 | ||
1827 | SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), | |
1828 | USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re))); | |
110cfe1c JH |
1829 | } |
1830 | } | |
1831 | } | |
1832 | ||
726a989a | 1833 | |
128a79fb KH |
1834 | /* Copy edges from BB into its copy constructed earlier, scale profile |
1835 | accordingly. Edges will be taken care of later. Assume aux | |
1836 | pointers point to the copies of each BB. */ | |
726a989a | 1837 | |
e21aff8a | 1838 | static void |
0178d644 | 1839 | copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb) |
e21aff8a | 1840 | { |
cceb1885 | 1841 | basic_block new_bb = (basic_block) bb->aux; |
e21aff8a SB |
1842 | edge_iterator ei; |
1843 | edge old_edge; | |
726a989a | 1844 | gimple_stmt_iterator si; |
e21aff8a SB |
1845 | int flags; |
1846 | ||
1847 | /* Use the indices from the original blocks to create edges for the | |
1848 | new ones. */ | |
1849 | FOR_EACH_EDGE (old_edge, ei, bb->succs) | |
e0704a46 JH |
1850 | if (!(old_edge->flags & EDGE_EH)) |
1851 | { | |
82d6e6fc | 1852 | edge new_edge; |
e21aff8a | 1853 | |
e0704a46 | 1854 | flags = old_edge->flags; |
e21aff8a | 1855 | |
e0704a46 JH |
1856 | /* Return edges do get a FALLTHRU flag when they get inlined. */ | |
1857 | if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags | |
1858 | && old_edge->dest->aux != EXIT_BLOCK_PTR) | |
1859 | flags |= EDGE_FALLTHRU; | |
82d6e6fc KG |
1860 | new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags); |
1861 | new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE; | |
1862 | new_edge->probability = old_edge->probability; | |
e0704a46 | 1863 | } |
e21aff8a SB |
1864 | |
1865 | if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK) | |
1866 | return; | |
1867 | ||
726a989a | 1868 | for (si = gsi_start_bb (new_bb); !gsi_end_p (si);) |
e21aff8a | 1869 | { |
726a989a | 1870 | gimple copy_stmt; |
e9705dc5 | 1871 | bool can_throw, nonlocal_goto; |
e21aff8a | 1872 | |
726a989a | 1873 | copy_stmt = gsi_stmt (si); |
b5b8b0ac AO |
1874 | if (!is_gimple_debug (copy_stmt)) |
1875 | { | |
1876 | update_stmt (copy_stmt); | |
1877 | if (gimple_in_ssa_p (cfun)) | |
1878 | mark_symbols_for_renaming (copy_stmt); | |
1879 | } | |
726a989a | 1880 | |
e21aff8a | 1881 | /* Do this before the possible split_block. */ |
726a989a | 1882 | gsi_next (&si); |
e21aff8a SB |
1883 | |
1884 | /* If this tree could throw an exception, there are two | |
1885 | cases where we need to add abnormal edge(s): the | |
1886 | tree wasn't in a region and there is a "current | |
1887 | region" in the caller; or the original tree had | |
1888 | EH edges. In both cases split the block after the tree, | |
1889 | and add abnormal edge(s) as needed; we need both | |
1890 | those from the callee and the caller. | |
1891 | We check whether the copy can throw, because the const | |
1892 | propagation can change an INDIRECT_REF which throws | |
1893 | into a COMPONENT_REF which doesn't. If the copy | |
1894 | can throw, the original could also throw. */ | |
726a989a RB |
1895 | can_throw = stmt_can_throw_internal (copy_stmt); |
1896 | nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt); | |
e9705dc5 AO |
1897 | |
1898 | if (can_throw || nonlocal_goto) | |
e21aff8a | 1899 | { |
726a989a | 1900 | if (!gsi_end_p (si)) |
e21aff8a SB |
1901 | /* Note that bb's predecessor edges aren't necessarily |
1902 | right at this point; split_block doesn't care. */ | |
1903 | { | |
1904 | edge e = split_block (new_bb, copy_stmt); | |
110cfe1c | 1905 | |
e21aff8a | 1906 | new_bb = e->dest; |
110cfe1c | 1907 | new_bb->aux = e->src->aux; |
726a989a | 1908 | si = gsi_start_bb (new_bb); |
e21aff8a | 1909 | } |
e9705dc5 | 1910 | } |
e21aff8a | 1911 | |
1d65f45c RH |
1912 | if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH) |
1913 | make_eh_dispatch_edges (copy_stmt); | |
1914 | else if (can_throw) | |
e9705dc5 | 1915 | make_eh_edges (copy_stmt); |
110cfe1c | 1916 | |
e9705dc5 | 1917 | if (nonlocal_goto) |
726a989a | 1918 | make_abnormal_goto_edges (gimple_bb (copy_stmt), true); |
e9705dc5 AO |
1919 | |
1920 | if ((can_throw || nonlocal_goto) | |
1921 | && gimple_in_ssa_p (cfun)) | |
726a989a | 1922 | update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb, |
e9705dc5 | 1923 | can_throw, nonlocal_goto); |
110cfe1c JH |
1924 | } |
1925 | } | |
1926 | ||
1927 | /* Copy the PHIs. All blocks and edges are copied, some blocks | |
1928 | were possibly split and new outgoing EH edges inserted. | |
1929 | BB points to the block of the original function and AUX pointers link | |
1930 | the original and newly copied blocks. */ | |
1931 | ||
1932 | static void | |
1933 | copy_phis_for_bb (basic_block bb, copy_body_data *id) | |
1934 | { | |
3d9a9f94 | 1935 | basic_block const new_bb = (basic_block) bb->aux; |
110cfe1c | 1936 | edge_iterator ei; |
726a989a RB |
1937 | gimple phi; |
1938 | gimple_stmt_iterator si; | |
110cfe1c | 1939 | |
726a989a | 1940 | for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si)) |
110cfe1c | 1941 | { |
726a989a RB |
1942 | tree res, new_res; |
1943 | gimple new_phi; | |
110cfe1c JH |
1944 | edge new_edge; |
1945 | ||
726a989a RB |
1946 | phi = gsi_stmt (si); |
1947 | res = PHI_RESULT (phi); | |
1948 | new_res = res; | |
110cfe1c JH |
1949 | if (is_gimple_reg (res)) |
1950 | { | |
726a989a | 1951 | walk_tree (&new_res, copy_tree_body_r, id, NULL); |
110cfe1c JH |
1952 | SSA_NAME_DEF_STMT (new_res) |
1953 | = new_phi = create_phi_node (new_res, new_bb); | |
1954 | FOR_EACH_EDGE (new_edge, ei, new_bb->preds) | |
1955 | { | |
726a989a RB |
1956 | edge const old_edge |
1957 | = find_edge ((basic_block) new_edge->src->aux, bb); | |
110cfe1c JH |
1958 | tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge); |
1959 | tree new_arg = arg; | |
726a989a RB |
1960 | tree block = id->block; |
1961 | id->block = NULL_TREE; | |
1962 | walk_tree (&new_arg, copy_tree_body_r, id, NULL); | |
1963 | id->block = block; | |
110cfe1c | 1964 | gcc_assert (new_arg); |
36b6e793 JJ |
1965 | /* With return slot optimization we can end up with |
1966 | non-gimple (foo *)&this->m, fix that here. */ | |
1967 | if (TREE_CODE (new_arg) != SSA_NAME | |
1968 | && TREE_CODE (new_arg) != FUNCTION_DECL | |
1969 | && !is_gimple_val (new_arg)) | |
1970 | { | |
726a989a RB |
1971 | gimple_seq stmts = NULL; |
1972 | new_arg = force_gimple_operand (new_arg, &stmts, true, NULL); | |
1973 | gsi_insert_seq_on_edge_immediate (new_edge, stmts); | |
36b6e793 | 1974 | } |
b8698a0f | 1975 | add_phi_arg (new_phi, new_arg, new_edge, |
f5045c96 | 1976 | gimple_phi_arg_location_from_edge (phi, old_edge)); |
110cfe1c | 1977 | } |
e21aff8a SB |
1978 | } |
1979 | } | |
1980 | } | |
1981 | ||
726a989a | 1982 | |
e21aff8a | 1983 | /* Wrapper for remap_decl so it can be used as a callback. */ |
726a989a | 1984 | |
e21aff8a SB |
1985 | static tree |
1986 | remap_decl_1 (tree decl, void *data) | |
1987 | { | |
1b369fae | 1988 | return remap_decl (decl, (copy_body_data *) data); |
e21aff8a SB |
1989 | } |
1990 | ||
110cfe1c JH |
1991 | /* Build struct function and associated data structures for the new clone | |
1992 | NEW_FNDECL to be built. CALLEE_FNDECL is the original. */ | |
1993 | ||
1994 | static void | |
0d63a740 | 1995 | initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count) |
110cfe1c | 1996 | { |
110cfe1c | 1997 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
0d63a740 | 1998 | gcov_type count_scale; |
110cfe1c JH |
1999 | |
2000 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) | |
2001 | count_scale = (REG_BR_PROB_BASE * count | |
2002 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); | |
2003 | else | |
0d63a740 | 2004 | count_scale = REG_BR_PROB_BASE; |
110cfe1c JH |
2005 | |
2006 | /* Register specific tree functions. */ | |
726a989a | 2007 | gimple_register_cfg_hooks (); |
39ecc018 JH |
2008 | |
2009 | /* Get a clean struct function. */ | |
2010 | push_struct_function (new_fndecl); | |
2011 | ||
2012 | /* We will rebuild these, so just sanity check that they are empty. */ | |
2013 | gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL); | |
2014 | gcc_assert (cfun->local_decls == NULL); | |
2015 | gcc_assert (cfun->cfg == NULL); | |
2016 | gcc_assert (cfun->decl == new_fndecl); | |
2017 | ||
39ecc018 JH |
2018 | /* Copy items we preserve during cloning. */ | |
2019 | cfun->static_chain_decl = src_cfun->static_chain_decl; | |
2020 | cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area; | |
2021 | cfun->function_end_locus = src_cfun->function_end_locus; | |
2022 | cfun->curr_properties = src_cfun->curr_properties; | |
2023 | cfun->last_verified = src_cfun->last_verified; | |
39ecc018 JH |
2024 | cfun->va_list_gpr_size = src_cfun->va_list_gpr_size; |
2025 | cfun->va_list_fpr_size = src_cfun->va_list_fpr_size; | |
39ecc018 JH |
2026 | cfun->has_nonlocal_label = src_cfun->has_nonlocal_label; |
2027 | cfun->stdarg = src_cfun->stdarg; | |
2028 | cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p; | |
2029 | cfun->after_inlining = src_cfun->after_inlining; | |
2030 | cfun->returns_struct = src_cfun->returns_struct; | |
2031 | cfun->returns_pcc_struct = src_cfun->returns_pcc_struct; | |
2032 | cfun->after_tree_profile = src_cfun->after_tree_profile; | |
2033 | ||
110cfe1c JH |
2034 | init_empty_tree_cfg (); |
2035 | ||
0d63a740 | 2036 | profile_status_for_function (cfun) = profile_status_for_function (src_cfun); |
110cfe1c JH |
2037 | ENTRY_BLOCK_PTR->count = |
2038 | (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2039 | REG_BR_PROB_BASE); | |
0d63a740 JH |
2040 | ENTRY_BLOCK_PTR->frequency |
2041 | = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency; | |
110cfe1c JH |
2042 | EXIT_BLOCK_PTR->count = |
2043 | (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2044 | REG_BR_PROB_BASE); | |
2045 | EXIT_BLOCK_PTR->frequency = | |
0d63a740 | 2046 | EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency; |
110cfe1c JH |
2047 | if (src_cfun->eh) |
2048 | init_eh_for_function (); | |
2049 | ||
2050 | if (src_cfun->gimple_df) | |
2051 | { | |
5db9ba0c | 2052 | init_tree_ssa (cfun); |
110cfe1c JH |
2053 | cfun->gimple_df->in_ssa_p = true; |
2054 | init_ssa_operands (); | |
2055 | } | |
2056 | pop_cfun (); | |
2057 | } | |
2058 | ||
e21aff8a SB |
2059 | /* Make a copy of the body of FN so that it can be inserted inline in |
2060 | another function. Walks FN via CFG, returns new fndecl. */ | |
2061 | ||
2062 | static tree | |
0d63a740 | 2063 | copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale, |
e21aff8a SB |
2064 | basic_block entry_block_map, basic_block exit_block_map) |
2065 | { | |
1b369fae | 2066 | tree callee_fndecl = id->src_fn; |
e21aff8a | 2067 | /* Original cfun for the callee, doesn't change. */ |
1b369fae | 2068 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
110cfe1c | 2069 | struct function *cfun_to_copy; |
e21aff8a SB |
2070 | basic_block bb; |
2071 | tree new_fndecl = NULL; | |
0d63a740 | 2072 | gcov_type count_scale; |
110cfe1c | 2073 | int last; |
e21aff8a | 2074 | |
1b369fae | 2075 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) |
e21aff8a | 2076 | count_scale = (REG_BR_PROB_BASE * count |
1b369fae | 2077 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); |
e21aff8a | 2078 | else |
0d63a740 | 2079 | count_scale = REG_BR_PROB_BASE; |
e21aff8a SB |
2080 | |
2081 | /* Register specific tree functions. */ | |
726a989a | 2082 | gimple_register_cfg_hooks (); |
e21aff8a SB |
2083 | |
2084 | /* Must have a CFG here at this point. */ | |
2085 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION | |
2086 | (DECL_STRUCT_FUNCTION (callee_fndecl))); | |
2087 | ||
110cfe1c JH |
2088 | cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
2089 | ||
e21aff8a SB |
2090 | ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map; |
2091 | EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map; | |
110cfe1c JH |
2092 | entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); |
2093 | exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); | |
e21aff8a | 2094 | |
e21aff8a SB |
2095 | /* Duplicate any exception-handling regions. */ |
2096 | if (cfun->eh) | |
1d65f45c RH |
2097 | id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr, |
2098 | remap_decl_1, id); | |
726a989a | 2099 | |
e21aff8a SB |
2100 | /* Use aux pointers to map the original blocks to their copies. */ | |
2101 | FOR_EACH_BB_FN (bb, cfun_to_copy) | |
110cfe1c | 2102 | { |
82d6e6fc KG |
2103 | basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale); |
2104 | bb->aux = new_bb; | |
2105 | new_bb->aux = bb; | |
110cfe1c JH |
2106 | } |
2107 | ||
7c57be85 | 2108 | last = last_basic_block; |
726a989a | 2109 | |
e21aff8a SB |
2110 | /* Now that we've duplicated the blocks, duplicate their edges. */ |
2111 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
e9705dc5 | 2112 | copy_edges_for_bb (bb, count_scale, exit_block_map); |
726a989a | 2113 | |
110cfe1c JH |
2114 | if (gimple_in_ssa_p (cfun)) |
2115 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
2116 | copy_phis_for_bb (bb, id); | |
726a989a | 2117 | |
e21aff8a | 2118 | FOR_ALL_BB_FN (bb, cfun_to_copy) |
110cfe1c JH |
2119 | { |
2120 | ((basic_block)bb->aux)->aux = NULL; | |
2121 | bb->aux = NULL; | |
2122 | } | |
726a989a | 2123 | |
110cfe1c JH |
2124 | /* Zero out AUX fields of newly created blocks during EH edge | |
2125 | insertion. */ | |
7c57be85 | 2126 | for (; last < last_basic_block; last++) |
110cfe1c JH |
2127 | BASIC_BLOCK (last)->aux = NULL; |
2128 | entry_block_map->aux = NULL; | |
2129 | exit_block_map->aux = NULL; | |
e21aff8a | 2130 | |
1d65f45c RH |
2131 | if (id->eh_map) |
2132 | { | |
2133 | pointer_map_destroy (id->eh_map); | |
2134 | id->eh_map = NULL; | |
2135 | } | |
2136 | ||
e21aff8a SB |
2137 | return new_fndecl; |
2138 | } | |
2139 | ||
b5b8b0ac AO |
2140 | /* Copy the debug STMT using ID. We deal with these statements in a |
2141 | special way: if any variable in their VALUE expression wasn't | |
2142 | remapped yet, we won't remap it, because that would get decl uids | |
2143 | out of sync, causing codegen differences between -g and -g0. If | |
2144 | this arises, we drop the VALUE expression altogether. */ | |
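/* For instance (illustrative GIMPLE dump syntax): a copied | |
# DEBUG y => x_1 + 1 | |
whose x_1 was never remapped into the clone is reset to | |
# DEBUG y => NULL | |
instead of dragging in an unremapped decl. */ | |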
2145 | ||
2146 | static void | |
2147 | copy_debug_stmt (gimple stmt, copy_body_data *id) | |
2148 | { | |
2149 | tree t, *n; | |
2150 | struct walk_stmt_info wi; | |
2151 | ||
2152 | t = id->block; | |
2153 | if (gimple_block (stmt)) | |
2154 | { | |
2155 | tree *n; | |
2156 | n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt)); | |
2157 | if (n) | |
2158 | t = *n; | |
2159 | } | |
2160 | gimple_set_block (stmt, t); | |
2161 | ||
2162 | /* Remap all the operands in COPY. */ | |
2163 | memset (&wi, 0, sizeof (wi)); | |
2164 | wi.info = id; | |
2165 | ||
2166 | processing_debug_stmt = 1; | |
2167 | ||
2168 | t = gimple_debug_bind_get_var (stmt); | |
2169 | ||
2170 | if (TREE_CODE (t) == PARM_DECL && id->debug_map | |
2171 | && (n = (tree *) pointer_map_contains (id->debug_map, t))) | |
2172 | { | |
2173 | gcc_assert (TREE_CODE (*n) == VAR_DECL); | |
2174 | t = *n; | |
2175 | } | |
d17af147 JJ |
2176 | else if (TREE_CODE (t) == VAR_DECL |
2177 | && !TREE_STATIC (t) | |
2178 | && gimple_in_ssa_p (cfun) | |
2179 | && !pointer_map_contains (id->decl_map, t) | |
2180 | && !var_ann (t)) | |
2181 | /* T is a non-localized variable. */; | |
b5b8b0ac AO |
2182 | else |
2183 | walk_tree (&t, remap_gimple_op_r, &wi, NULL); | |
2184 | ||
2185 | gimple_debug_bind_set_var (stmt, t); | |
2186 | ||
2187 | if (gimple_debug_bind_has_value_p (stmt)) | |
2188 | walk_tree (gimple_debug_bind_get_value_ptr (stmt), | |
2189 | remap_gimple_op_r, &wi, NULL); | |
2190 | ||
2191 | /* Punt if any decl couldn't be remapped. */ | |
2192 | if (processing_debug_stmt < 0) | |
2193 | gimple_debug_bind_reset_value (stmt); | |
2194 | ||
2195 | processing_debug_stmt = 0; | |
2196 | ||
2197 | update_stmt (stmt); | |
2198 | if (gimple_in_ssa_p (cfun)) | |
2199 | mark_symbols_for_renaming (stmt); | |
2200 | } | |
2201 | ||
2202 | /* Process deferred debug stmts. In order to give values better odds | |
2203 | of being successfully remapped, we delay the processing of debug | |
2204 | stmts until all other stmts that might require remapping are | |
2205 | processed. */ | |
2206 | ||
2207 | static void | |
2208 | copy_debug_stmts (copy_body_data *id) | |
2209 | { | |
2210 | size_t i; | |
2211 | gimple stmt; | |
2212 | ||
2213 | if (!id->debug_stmts) | |
2214 | return; | |
2215 | ||
2216 | for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++) | |
2217 | copy_debug_stmt (stmt, id); | |
2218 | ||
2219 | VEC_free (gimple, heap, id->debug_stmts); | |
2220 | } | |
2221 | ||
f82a627c EB |
2222 | /* Make a copy of the body of SRC_FN so that it can be inserted inline in |
2223 | another function. */ | |
2224 | ||
2225 | static tree | |
2226 | copy_tree_body (copy_body_data *id) | |
2227 | { | |
2228 | tree fndecl = id->src_fn; | |
2229 | tree body = DECL_SAVED_TREE (fndecl); | |
2230 | ||
2231 | walk_tree (&body, copy_tree_body_r, id, NULL); | |
2232 | ||
2233 | return body; | |
2234 | } | |
2235 | ||
b5b8b0ac AO |
2236 | /* Make a copy of the body of FN so that it can be inserted inline in |
2237 | another function. */ | |
2238 | ||
e21aff8a | 2239 | static tree |
0d63a740 | 2240 | copy_body (copy_body_data *id, gcov_type count, int frequency_scale, |
e21aff8a SB |
2241 | basic_block entry_block_map, basic_block exit_block_map) |
2242 | { | |
1b369fae | 2243 | tree fndecl = id->src_fn; |
e21aff8a SB |
2244 | tree body; |
2245 | ||
2246 | /* If this body has a CFG, walk CFG and copy. */ | |
2247 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl))); | |
0d63a740 | 2248 | body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map); |
b5b8b0ac | 2249 | copy_debug_stmts (id); |
e21aff8a SB |
2250 | |
2251 | return body; | |
2252 | } | |
2253 | ||
04482133 AO |
2254 | /* Return true if VALUE is an ADDR_EXPR of an automatic variable |
2255 | defined in function FN, or of a data member thereof. */ | |
2256 | ||
2257 | static bool | |
2258 | self_inlining_addr_expr (tree value, tree fn) | |
2259 | { | |
2260 | tree var; | |
2261 | ||
2262 | if (TREE_CODE (value) != ADDR_EXPR) | |
2263 | return false; | |
2264 | ||
2265 | var = get_base_address (TREE_OPERAND (value, 0)); | |
e21aff8a | 2266 | |
50886bf1 | 2267 | return var && auto_var_in_fn_p (var, fn); |
04482133 AO |
2268 | } |
2269 | ||
b5b8b0ac AO |
2270 | /* Append to BB a debug annotation that binds VAR to VALUE, inheriting |
2271 | lexical block and line number information from BASE_STMT, if given, | |
2272 | or from the last stmt of the block otherwise. */ | |
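/* The practical effect (hedged): even when a parameter's | |
initialization is optimized away entirely, a | |
# DEBUG p => <value> | |
annotation is still appended so debuggers can display P. */ | |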
2273 | ||
2274 | static gimple | |
2275 | insert_init_debug_bind (copy_body_data *id, | |
2276 | basic_block bb, tree var, tree value, | |
2277 | gimple base_stmt) | |
2278 | { | |
2279 | gimple note; | |
2280 | gimple_stmt_iterator gsi; | |
2281 | tree tracked_var; | |
2282 | ||
2283 | if (!gimple_in_ssa_p (id->src_cfun)) | |
2284 | return NULL; | |
2285 | ||
2286 | if (!MAY_HAVE_DEBUG_STMTS) | |
2287 | return NULL; | |
2288 | ||
2289 | tracked_var = target_for_debug_bind (var); | |
2290 | if (!tracked_var) | |
2291 | return NULL; | |
2292 | ||
2293 | if (bb) | |
2294 | { | |
2295 | gsi = gsi_last_bb (bb); | |
2296 | if (!base_stmt && !gsi_end_p (gsi)) | |
2297 | base_stmt = gsi_stmt (gsi); | |
2298 | } | |
2299 | ||
2300 | note = gimple_build_debug_bind (tracked_var, value, base_stmt); | |
2301 | ||
2302 | if (bb) | |
2303 | { | |
2304 | if (!gsi_end_p (gsi)) | |
2305 | gsi_insert_after (&gsi, note, GSI_SAME_STMT); | |
2306 | else | |
2307 | gsi_insert_before (&gsi, note, GSI_SAME_STMT); | |
2308 | } | |
2309 | ||
2310 | return note; | |
2311 | } | |
2312 | ||
6de9cd9a | 2313 | static void |
b5b8b0ac | 2314 | insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt) |
0f1961a2 | 2315 | { |
0f1961a2 JH |
2316 | /* If VAR represents a zero-sized variable, it's possible that the |
2317 | assignment statement may result in no gimple statements. */ | |
2318 | if (init_stmt) | |
c2a4718a JJ |
2319 | { |
2320 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
0f1961a2 | 2321 | |
bfb0b886 RG |
2322 | /* We can end up with init statements that store to a non-register |
2323 | from a rhs with a conversion. Handle that here by forcing the | |
2324 | rhs into a temporary. gimple_regimplify_operands is not | |
2325 | prepared to do this for us. */ | |
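/* E.g. (illustrative): a parameter setup like | |
mem.f = (float) i; | |
stores to memory straight from a conversion; it is split here into | |
tmp = (float) i; | |
mem.f = tmp; */ | |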
b5b8b0ac AO |
2326 | if (!is_gimple_debug (init_stmt) |
2327 | && !is_gimple_reg (gimple_assign_lhs (init_stmt)) | |
bfb0b886 RG |
2328 | && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt))) |
2329 | && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS) | |
2330 | { | |
2331 | tree rhs = build1 (gimple_assign_rhs_code (init_stmt), | |
2332 | gimple_expr_type (init_stmt), | |
2333 | gimple_assign_rhs1 (init_stmt)); | |
2334 | rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false, | |
2335 | GSI_NEW_STMT); | |
2336 | gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs)); | |
2337 | gimple_assign_set_rhs1 (init_stmt, rhs); | |
2338 | } | |
c2a4718a JJ |
2339 | gsi_insert_after (&si, init_stmt, GSI_NEW_STMT); |
2340 | gimple_regimplify_operands (init_stmt, &si); | |
2341 | mark_symbols_for_renaming (init_stmt); | |
b5b8b0ac AO |
2342 | |
2343 | if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS) | |
2344 | { | |
2345 | tree var, def = gimple_assign_lhs (init_stmt); | |
2346 | ||
2347 | if (TREE_CODE (def) == SSA_NAME) | |
2348 | var = SSA_NAME_VAR (def); | |
2349 | else | |
2350 | var = def; | |
2351 | ||
2352 | insert_init_debug_bind (id, bb, var, def, init_stmt); | |
2353 | } | |
c2a4718a | 2354 | } |
0f1961a2 JH |
2355 | } |
2356 | ||
2357 | /* Initialize parameter P with VALUE. If needed, produce the init statement | |
2358 | at the end of BB. When BB is NULL, we return the init statement to be | |
2359 | output later. */ | |
2360 | static gimple | |
1b369fae | 2361 | setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn, |
e21aff8a | 2362 | basic_block bb, tree *vars) |
6de9cd9a | 2363 | { |
0f1961a2 | 2364 | gimple init_stmt = NULL; |
6de9cd9a | 2365 | tree var; |
f4088621 | 2366 | tree rhs = value; |
110cfe1c JH |
2367 | tree def = (gimple_in_ssa_p (cfun) |
2368 | ? gimple_default_def (id->src_cfun, p) : NULL); | |
6de9cd9a | 2369 | |
f4088621 RG |
2370 | if (value |
2371 | && value != error_mark_node | |
2372 | && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))) | |
c54e3854 RG |
2373 | { |
2374 | if (fold_convertible_p (TREE_TYPE (p), value)) | |
2375 | rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value); | |
2376 | else | |
2377 | /* ??? For valid (GIMPLE) programs we should not end up here. | |
2378 | Still if something has gone wrong and we end up with truly | |
2379 | mismatched types here, fall back to using a VIEW_CONVERT_EXPR | |
2380 | to not leak invalid GIMPLE to the following passes. */ | |
2381 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value); | |
2382 | } | |
f4088621 | 2383 | |
b5b8b0ac AO |
2384 | /* Make an equivalent VAR_DECL. Note that we must NOT remap the type |
2385 | here since the type of this decl must be visible to the calling | |
2386 | function. */ | |
2387 | var = copy_decl_to_var (p, id); | |
2388 | ||
2389 | /* We're actually using the newly-created var. */ | |
2390 | if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL) | |
2391 | { | |
2392 | get_var_ann (var); | |
2393 | add_referenced_var (var); | |
2394 | } | |
2395 | ||
2396 | /* Declare this new variable. */ | |
2397 | TREE_CHAIN (var) = *vars; | |
2398 | *vars = var; | |
2399 | ||
2400 | /* Make the gimplifier happy about this variable. */ | |
2401 | DECL_SEEN_IN_BIND_EXPR_P (var) = 1; | |
2402 | ||
110cfe1c | 2403 | /* If the parameter is never assigned to and has no SSA_NAMEs created, | |
b5b8b0ac AO |
2404 | we would not need to create a new variable here at all, if it |
2405 | weren't for debug info. Still, we can just use the argument | |
2406 | value. */ | |
6de9cd9a DN |
2407 | if (TREE_READONLY (p) |
2408 | && !TREE_ADDRESSABLE (p) | |
110cfe1c JH |
2409 | && value && !TREE_SIDE_EFFECTS (value) |
2410 | && !def) | |
6de9cd9a | 2411 | { |
84936f6f RH |
2412 | /* We may produce non-gimple trees by adding NOPs or introduce |
2413 | invalid sharing when the operand is not really constant. | |
2414 | It is no big deal to prohibit constant propagation here, as | |
2415 | we will constant propagate in the DOM1 pass anyway. */ | |
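/* A concrete hazard (illustrative): when inlining a recursive call | |
int f (int *p) { int local; ... f (&local) ... } | |
the argument "&local" names a local of the function being inlined, | |
so propagating it would bind the copy to the wrong instance of the | |
variable; self_inlining_addr_expr below rejects exactly this. */ | |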
2416 | if (is_gimple_min_invariant (value) | |
f4088621 RG |
2417 | && useless_type_conversion_p (TREE_TYPE (p), |
2418 | TREE_TYPE (value)) | |
04482133 AO |
2419 | /* We have to be very careful about ADDR_EXPR. Make sure |
2420 | the base variable isn't a local variable of the inlined | |
2421 | function, e.g., when doing recursive inlining, direct or | |
2422 | mutually-recursive or whatever, which is why we don't | |
2423 | just test whether fn == current_function_decl. */ | |
2424 | && ! self_inlining_addr_expr (value, fn)) | |
6de9cd9a | 2425 | { |
6de9cd9a | 2426 | insert_decl_map (id, p, value); |
b5b8b0ac AO |
2427 | insert_debug_decl_map (id, p, var); |
2428 | return insert_init_debug_bind (id, bb, var, value, NULL); | |
6de9cd9a DN |
2429 | } |
2430 | } | |
2431 | ||
6de9cd9a DN |
2432 | /* Register the VAR_DECL as the equivalent for the PARM_DECL; |
2433 | that way, when the PARM_DECL is encountered, it will be | |
2434 | automatically replaced by the VAR_DECL. */ | |
7c7d3047 | 2435 | insert_decl_map (id, p, var); |
6de9cd9a | 2436 | |
6de9cd9a DN |
2437 | /* Even if P was TREE_READONLY, the new VAR should not be. |
2438 | In the original code, we would have constructed a | |
2439 | temporary, and then the function body would have never | |
2440 | changed the value of P. However, now, we will be | |
2441 | constructing VAR directly. The constructor body may | |
2442 | change its value multiple times as it is being | |
2443 | constructed. Therefore, it must not be TREE_READONLY; | |
2444 | the back-end assumes that a TREE_READONLY variable is | |
2445 | assigned to only once. */ | |
2446 | if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p))) | |
2447 | TREE_READONLY (var) = 0; | |
2448 | ||
110cfe1c JH |
2449 | /* If there is no setup required and we are in SSA, take the easy route |
2450 | replacing all SSA names representing the function parameter by the | |
2451 | SSA name passed to function. | |
2452 | ||
2453 | We need to construct map for the variable anyway as it might be used | |
2454 | in different SSA names when parameter is set in function. | |
2455 | ||
8454d27e JH |
2456 | Do replacement at -O0 for const arguments replaced by constant. |
2457 | This is important for builtin_constant_p and other constructs requiring | |
b5b8b0ac | 2458 | a constant argument to be visible in the inlined function body. */ | |
110cfe1c | 2459 | if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p) |
8454d27e JH |
2460 | && (optimize |
2461 | || (TREE_READONLY (p) | |
2462 | && is_gimple_min_invariant (rhs))) | |
110cfe1c | 2463 | && (TREE_CODE (rhs) == SSA_NAME |
9b718f81 JH |
2464 | || is_gimple_min_invariant (rhs)) |
2465 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)) | |
110cfe1c JH |
2466 | { |
2467 | insert_decl_map (id, def, rhs); | |
b5b8b0ac | 2468 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
110cfe1c JH |
2469 | } |
2470 | ||
f6f2da7d JH |
2471 | /* If the value of the argument is never used, don't bother initializing | |
2472 | it. */ | |
1cf5abb3 | 2473 | if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p)) |
f6f2da7d JH |
2474 | { |
2475 | gcc_assert (!value || !TREE_SIDE_EFFECTS (value)); | |
b5b8b0ac | 2476 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
f6f2da7d JH |
2477 | } |
2478 | ||
6de9cd9a DN |
2479 | /* Initialize this VAR_DECL from the equivalent argument. Convert |
2480 | the argument to the proper type in case it was promoted. */ | |
2481 | if (value) | |
2482 | { | |
6de9cd9a | 2483 | if (rhs == error_mark_node) |
110cfe1c | 2484 | { |
7c7d3047 | 2485 | insert_decl_map (id, p, var); |
b5b8b0ac | 2486 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
110cfe1c | 2487 | } |
afe08db5 | 2488 | |
73dab33b | 2489 | STRIP_USELESS_TYPE_CONVERSION (rhs); |
6de9cd9a | 2490 | |
726a989a | 2491 | /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we |
6de9cd9a | 2492 | keep our trees in gimple form. */ |
110cfe1c JH |
2493 | if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p)) |
2494 | { | |
2495 | def = remap_ssa_name (def, id); | |
726a989a | 2496 | init_stmt = gimple_build_assign (def, rhs); |
110cfe1c JH |
2497 | SSA_NAME_IS_DEFAULT_DEF (def) = 0; |
2498 | set_default_def (var, NULL); | |
2499 | } | |
2500 | else | |
726a989a | 2501 | init_stmt = gimple_build_assign (var, rhs); |
6de9cd9a | 2502 | |
0f1961a2 | 2503 | if (bb && init_stmt) |
b5b8b0ac | 2504 | insert_init_stmt (id, bb, init_stmt); |
6de9cd9a | 2505 | } |
0f1961a2 | 2506 | return init_stmt; |
6de9cd9a DN |
2507 | } |
2508 | ||
d4e4baa9 | 2509 | /* Generate code to initialize the parameters of the function at the |
726a989a | 2510 | top of the stack in ID from the GIMPLE_CALL STMT. */ |
d4e4baa9 | 2511 | |
e21aff8a | 2512 | static void |
726a989a | 2513 | initialize_inlined_parameters (copy_body_data *id, gimple stmt, |
e21aff8a | 2514 | tree fn, basic_block bb) |
d4e4baa9 | 2515 | { |
d4e4baa9 | 2516 | tree parms; |
726a989a | 2517 | size_t i; |
d4e4baa9 | 2518 | tree p; |
d436bff8 | 2519 | tree vars = NULL_TREE; |
726a989a | 2520 | tree static_chain = gimple_call_chain (stmt); |
d4e4baa9 AO |
2521 | |
2522 | /* Figure out what the parameters are. */ | |
18c6ada9 | 2523 | parms = DECL_ARGUMENTS (fn); |
d4e4baa9 | 2524 | |
d4e4baa9 AO |
2525 | /* Loop through the parameter declarations, replacing each with an |
2526 | equivalent VAR_DECL, appropriately initialized. */ | |
726a989a RB |
2527 | for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++) |
2528 | { | |
2529 | tree val; | |
2530 | val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL; | |
2531 | setup_one_parameter (id, p, val, fn, bb, &vars); | |
2532 | } | |
4838c5ee | 2533 | |
6de9cd9a DN |
2534 | /* Initialize the static chain. */ |
2535 | p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl; | |
ea99e0be | 2536 | gcc_assert (fn != current_function_decl); |
6de9cd9a DN |
2537 | if (p) |
2538 | { | |
2539 | /* No static chain? Seems like a bug in tree-nested.c. */ | |
1e128c5f | 2540 | gcc_assert (static_chain); |
4838c5ee | 2541 | |
e21aff8a | 2542 | setup_one_parameter (id, p, static_chain, fn, bb, &vars); |
4838c5ee AO |
2543 | } |
2544 | ||
e21aff8a | 2545 | declare_inline_vars (id->block, vars); |
d4e4baa9 AO |
2546 | } |
2547 | ||
726a989a | 2548 | |
e21aff8a SB |
2549 | /* Declare a return variable to replace the RESULT_DECL for the |
2550 | function we are calling. An appropriate DECL_STMT is returned. | |
2551 | The USE_STMT is filled to contain a use of the declaration to | |
2552 | indicate the return value of the function. | |
2553 | ||
110cfe1c JH |
2554 | RETURN_SLOT, if non-null, is the place where to store the result. It | |
2555 | is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null, | |
726a989a | 2556 | was the LHS of the MODIFY_EXPR to which this call is the RHS. |
7740f00d | 2557 | |
0f900dfa JJ |
2558 | The return value is a (possibly null) value that holds the result |
2559 | as seen by the caller. */ | |
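/* Rough picture (illustrative): for "s = foo ();" MODIFY_DEST is S | |
and may often be reused directly as the return variable, while for | |
a call with CALL_EXPR_RETURN_SLOT_OPT the callee's RESULT_DECL is | |
mapped onto the caller-provided RETURN_SLOT (or onto *&slot when | |
the result is returned by reference). */ | |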
d4e4baa9 | 2560 | |
d436bff8 | 2561 | static tree |
0f900dfa | 2562 | declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest) |
d4e4baa9 | 2563 | { |
1b369fae RH |
2564 | tree callee = id->src_fn; |
2565 | tree caller = id->dst_fn; | |
7740f00d RH |
2566 | tree result = DECL_RESULT (callee); |
2567 | tree callee_type = TREE_TYPE (result); | |
ea2edf88 | 2568 | tree caller_type; |
7740f00d | 2569 | tree var, use; |
d4e4baa9 | 2570 | |
ea2edf88 RG |
2571 | /* Handle type-mismatches in the function declaration return type |
2572 | vs. the call expression. */ | |
2573 | if (modify_dest) | |
2574 | caller_type = TREE_TYPE (modify_dest); | |
2575 | else | |
2576 | caller_type = TREE_TYPE (TREE_TYPE (callee)); | |
2577 | ||
d4e4baa9 AO |
2578 | /* We don't need to do anything for functions that don't return |
2579 | anything. */ | |
7740f00d | 2580 | if (!result || VOID_TYPE_P (callee_type)) |
0f900dfa | 2581 | return NULL_TREE; |
d4e4baa9 | 2582 | |
cc77ae10 | 2583 | /* If there was a return slot, then the return value is the |
7740f00d | 2584 | dereferenced address of that object. */ |
110cfe1c | 2585 | if (return_slot) |
7740f00d | 2586 | { |
110cfe1c | 2587 | /* The front end shouldn't have used both return_slot and |
7740f00d | 2588 | a modify expression. */ |
1e128c5f | 2589 | gcc_assert (!modify_dest); |
cc77ae10 | 2590 | if (DECL_BY_REFERENCE (result)) |
110cfe1c JH |
2591 | { |
2592 | tree return_slot_addr = build_fold_addr_expr (return_slot); | |
2593 | STRIP_USELESS_TYPE_CONVERSION (return_slot_addr); | |
2594 | ||
2595 | /* We are going to construct *&return_slot and we can't do that | |
b8698a0f | 2596 | for variables not believed to be addressable. | |
110cfe1c JH |
2597 | |
2598 | FIXME: This check can possibly match, because values returned | |
2599 | via return slot optimization are not believed to have address | |
2600 | taken by alias analysis. */ | |
2601 | gcc_assert (TREE_CODE (return_slot) != SSA_NAME); | |
2602 | if (gimple_in_ssa_p (cfun)) | |
2603 | { | |
2604 | HOST_WIDE_INT bitsize; | |
2605 | HOST_WIDE_INT bitpos; | |
2606 | tree offset; | |
2607 | enum machine_mode mode; | |
2608 | int unsignedp; | |
2609 | int volatilep; | |
2610 | tree base; | |
2611 | base = get_inner_reference (return_slot, &bitsize, &bitpos, | |
2612 | &offset, | |
2613 | &mode, &unsignedp, &volatilep, | |
2614 | false); | |
2615 | if (TREE_CODE (base) == INDIRECT_REF) | |
2616 | base = TREE_OPERAND (base, 0); | |
2617 | if (TREE_CODE (base) == SSA_NAME) | |
2618 | base = SSA_NAME_VAR (base); | |
2619 | mark_sym_for_renaming (base); | |
2620 | } | |
2621 | var = return_slot_addr; | |
2622 | } | |
cc77ae10 | 2623 | else |
110cfe1c JH |
2624 | { |
2625 | var = return_slot; | |
2626 | gcc_assert (TREE_CODE (var) != SSA_NAME); | |
b5ca517c | 2627 | TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result); |
110cfe1c | 2628 | } |
0890b981 AP |
2629 | if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
2630 | || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
2631 | && !DECL_GIMPLE_REG_P (result) | |
22918034 | 2632 | && DECL_P (var)) |
0890b981 | 2633 | DECL_GIMPLE_REG_P (var) = 0; |
7740f00d RH |
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
                   && !DECL_GIMPLE_REG_P (result)
                   && DECL_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  if (gimple_in_ssa_p (cfun))
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->local_decls
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->local_decls);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  return use;
}
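
/* Note on the reuse logic above: when inlining "s = f ();" where S
   is, say, a large aggregate that is never made addressable, the
   code reuses S itself as the return variable rather than creating
   a temporary and copying, so the inlined body writes its result
   directly into the caller's destination (an illustrative scenario,
   not an exhaustive description of the conditions checked).  */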

/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

static const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree step;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
                  "because it receives a non-local goto");
      goto fail;
    }

  for (step = fun->local_decls; step; step = TREE_CHAIN (step))
    {
      tree decl = TREE_VALUE (step);

      if (TREE_CODE (decl) == VAR_DECL
          && TREE_STATIC (decl)
          && !DECL_EXTERNAL (decl)
          && DECL_INITIAL (decl)
          && walk_tree_without_duplicates (&DECL_INITIAL (decl),
                                           has_label_address_in_static_1,
                                           fndecl))
        {
          reason = G_("function %q+F can never be copied because it saves "
                      "address of local label in a static variable");
          goto fail;
        }
    }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}
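
/* For reference, a sketch of the construct rejected above
   (hypothetical user code, not part of GCC):

     int g (void)
     {
       static void *p = &&lab;   -- address of a local label saved
     lab:                        -- in a static variable
       return p != 0;
     }

   Copying such a body would leave the static initializer pointing
   at a label in the original function.  */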


static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                         struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline an alloca call unless the user explicitly forced
         it, as this may change the program's memory overhead drastically
         when the function using alloca is called in a loop.  In a GCC
         build of SPEC2000, inlining into schedule_block caused it to
         require 2GB of RAM instead of 256MB.  */
      if (gimple_alloca_call_p (stmt)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          *handled_ops_p = true;
          return fn;
        }

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          *handled_ops_p = true;
          return t;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
          /* We cannot inline functions that take a variable number of
             arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            *handled_ops_p = true;
            return t;

          default:
            break;
          }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          *handled_ops_p = true;
          return t;
        }
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}
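
/* For reference, the kind of code the GIMPLE_GOTO check above
   rejects (hypothetical user code, not part of GCC):

     void f (int i)
     {
       static void *labels[] = { &&a, &&b };
       goto *labels[i];   -- computed goto: destination not a LABEL_DECL
     a: ;
     b: ;
     }

   The label addresses stored in static storage would not be remapped
   when the body is duplicated into another function.  */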

/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or are non-optimal for inlining.  */
  visited_nodes = pointer_set_create ();
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
        break;
    }

  pointer_set_destroy (visited_nodes);
  return forbidden_p;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
                && DECL_DECLARED_INLINE_P (fn)
                && !DECL_NO_INLINE_WARNING_P (fn)
                && !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  /* Don't auto-inline anything that might not be bound within
     this unit of translation.  */
  else if (!DECL_DECLARED_INLINE_P (fn)
           && DECL_REPLACEABLE_P (fn))
    inlinable = false;

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.  */
      if (always_inline)
        sorry (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
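
/* A worked example of the estimate above, assuming the hypothetical
   target values MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4 (both are
   target-dependent): a 16-byte struct moves piecewise in
   (16 + 8 - 1) / 8 == 2 units, while a 40-byte struct exceeds the
   8 * 4 == 32 byte piecewise limit and is charged the flat memcpy
   cost of 4.  */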

/* Returns cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
                        tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:

      return 1;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
        return weights->div_mod_cost;
      return 1;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
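
/* For instance, with the weights set up by init_inline_once below,
   the division in "x = y / z" costs div_mod_cost (10 in the time
   weights) when Z is not a compile-time constant, but only 1 when Z
   is an INTEGER_CST, since division by a constant is normally
   strength-reduced to cheaper operations.  */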


/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

static int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}


/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have three cases to
         deal with:
         1) Simple assignments to registers;
         2) Stores to things that must live in memory.  This includes
            "normal" stores to scalars, but also assignments of large
            structures, or constructors of big arrays;

         Let us look at the first two cases, assuming we have "a = b + C":
         <GIMPLE_ASSIGN <var_decl "a">
                        <plus_expr <var_decl "b"> <constant C>>
         If "a" is a GIMPLE register, the assignment to it is free on almost
         any target, because "a" usually ends up in a real register.  Hence
         the only cost of this expression comes from the PLUS_EXPR, and we
         can ignore the GIMPLE_ASSIGN.
         If "a" is not a GIMPLE register, the assignment to "a" will most
         likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
         of moving something into "a", which we compute using the function
         estimate_move_cost.  */
      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      if (is_gimple_reg (lhs))
        cost = 0;
      else
        cost = estimate_move_cost (TREE_TYPE (lhs));

      if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
        cost += estimate_move_cost (TREE_TYPE (rhs));

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
                                      gimple_assign_rhs1 (stmt),
                                      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                                      == GIMPLE_BINARY_RHS
                                      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
                                         gimple_op (stmt, 0),
                                         gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      /* Take into account cost of the switch + guess 2 conditional jumps for
         each case label.

         TODO: once the switch expansion logic is sufficiently separated, we
         can do a better job on estimating cost of the switch.  */
      if (weights->time_based)
        cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
      else
        cost = gimple_switch_num_labels (stmt) * 2;
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        tree addr = gimple_call_fn (stmt);
        tree funtype = TREE_TYPE (addr);

        if (POINTER_TYPE_P (funtype))
          funtype = TREE_TYPE (funtype);

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
          cost = weights->target_builtin_call_cost;
        else
          cost = weights->call_cost;

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            /* Builtins that expand to constants.  */
            case BUILT_IN_CONSTANT_P:
            case BUILT_IN_EXPECT:
            case BUILT_IN_OBJECT_SIZE:
            case BUILT_IN_UNREACHABLE:
            /* Simple register moves or loads from stack.  */
            case BUILT_IN_RETURN_ADDRESS:
            case BUILT_IN_EXTRACT_RETURN_ADDR:
            case BUILT_IN_FROB_RETURN_ADDR:
            case BUILT_IN_RETURN:
            case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
            case BUILT_IN_FRAME_ADDRESS:
            case BUILT_IN_VA_END:
            case BUILT_IN_STACK_SAVE:
            case BUILT_IN_STACK_RESTORE:
            /* Exception state returns or moves registers around.  */
            case BUILT_IN_EH_FILTER:
            case BUILT_IN_EH_POINTER:
            case BUILT_IN_EH_COPY_VALUES:
              return 0;

            /* Builtins that are not expensive (that is they are most
               probably expanded inline into reasonably simple code).  */
            case BUILT_IN_ABS:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_BSWAP32:
            case BUILT_IN_BSWAP64:
            case BUILT_IN_CLZ:
            case BUILT_IN_CLZIMAX:
            case BUILT_IN_CLZL:
            case BUILT_IN_CLZLL:
            case BUILT_IN_CTZ:
            case BUILT_IN_CTZIMAX:
            case BUILT_IN_CTZL:
            case BUILT_IN_CTZLL:
            case BUILT_IN_FFS:
            case BUILT_IN_FFSIMAX:
            case BUILT_IN_FFSL:
            case BUILT_IN_FFSLL:
            case BUILT_IN_IMAXABS:
            case BUILT_IN_FINITE:
            case BUILT_IN_FINITEF:
            case BUILT_IN_FINITEL:
            case BUILT_IN_FINITED32:
            case BUILT_IN_FINITED64:
            case BUILT_IN_FINITED128:
            case BUILT_IN_FPCLASSIFY:
            case BUILT_IN_ISFINITE:
            case BUILT_IN_ISINF_SIGN:
            case BUILT_IN_ISINF:
            case BUILT_IN_ISINFF:
            case BUILT_IN_ISINFL:
            case BUILT_IN_ISINFD32:
            case BUILT_IN_ISINFD64:
            case BUILT_IN_ISINFD128:
            case BUILT_IN_ISNAN:
            case BUILT_IN_ISNANF:
            case BUILT_IN_ISNANL:
            case BUILT_IN_ISNAND32:
            case BUILT_IN_ISNAND64:
            case BUILT_IN_ISNAND128:
            case BUILT_IN_ISNORMAL:
            case BUILT_IN_ISGREATER:
            case BUILT_IN_ISGREATEREQUAL:
            case BUILT_IN_ISLESS:
            case BUILT_IN_ISLESSEQUAL:
            case BUILT_IN_ISLESSGREATER:
            case BUILT_IN_ISUNORDERED:
            case BUILT_IN_VA_ARG_PACK:
            case BUILT_IN_VA_ARG_PACK_LEN:
            case BUILT_IN_VA_COPY:
            case BUILT_IN_TRAP:
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_POPCOUNTL:
            case BUILT_IN_POPCOUNTLL:
            case BUILT_IN_POPCOUNTIMAX:
            case BUILT_IN_POPCOUNT:
            case BUILT_IN_PARITYL:
            case BUILT_IN_PARITYLL:
            case BUILT_IN_PARITYIMAX:
            case BUILT_IN_PARITY:
            case BUILT_IN_LABS:
            case BUILT_IN_LLABS:
            case BUILT_IN_PREFETCH:
              cost = weights->target_builtin_call_cost;
              break;

            default:
              break;
            }

        if (decl)
          funtype = TREE_TYPE (decl);

        if (!VOID_TYPE_P (TREE_TYPE (funtype)))
          cost += estimate_move_cost (TREE_TYPE (funtype));
        /* Our cost must be kept in sync with
           cgraph_estimate_size_after_inlining that does use function
           declaration to figure out the arguments.  */
        if (decl && DECL_ARGUMENTS (decl))
          {
            tree arg;
            for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
              if (!VOID_TYPE_P (TREE_TYPE (arg)))
                cost += estimate_move_cost (TREE_TYPE (arg));
          }
        else if (funtype && prototype_p (funtype))
          {
            tree t;
            for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
                 t = TREE_CHAIN (t))
              if (!VOID_TYPE_P (TREE_VALUE (t)))
                cost += estimate_move_cost (TREE_VALUE (t));
          }
        else
          {
            for (i = 0; i < gimple_call_num_args (stmt); i++)
              {
                tree arg = gimple_call_arg (stmt, i);
                if (!VOID_TYPE_P (TREE_TYPE (arg)))
                  cost += estimate_move_cost (TREE_TYPE (arg));
              }
          }

        break;
      }

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_RETURN:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      return 0;

    case GIMPLE_ASM:
      return asm_str_count (gimple_asm_string (stmt));

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
         argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
         we'd have a look at the eh region and estimate the number of
         edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OpenMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
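
/* Example for the GIMPLE_SWITCH estimate above: a switch with 16
   labels counts as 16 * 2 == 32 insns for size purposes (code is
   emitted per label) but only floor_log2 (16) * 2 == 8 for time
   purposes, roughly modelling a balanced decision tree or a jump
   table.  */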

/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}


/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.time_based = false;

  /* Estimating time for call is difficult, since we have no idea what the
     called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.target_builtin_call_cost = 10;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.time_based = true;
}
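
/* Under these weights, for example, a call to a void function with
   three register-sized scalar arguments (each argument move costing
   1 per estimate_move_cost) estimates to 1 + 3 == 4 size units but
   10 + 3 == 13 time units.  */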

/* Estimate the number of instructions in a gimple_seq.  */

int
count_insns_seq (gimple_seq seq, eni_weights *weights)
{
  gimple_stmt_iterator gsi;
  int n = 0;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n += estimate_num_insns (gsi_stmt (gsi), weights);

  return n;
}


/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}

/* Fetch callee declaration from the call graph edge going from NODE and
   associated with STMT call statement.  Return NULL_TREE if not found.  */
static tree
get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  if (cs && !cs->indirect_unknown_callee)
    return cs->callee->decl;

  return NULL_TREE;
}

/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
{
  tree use_retvar;
  tree fn;
  struct pointer_map_t *st, *dst;
  tree return_slot;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = FALSE;
  bool purge_dead_abnormal_edges;
  tree t_step;
  tree var;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (gimple_code (stmt) != GIMPLE_CALL)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = gimple_call_fndecl (stmt);
  if (!fn)
    {
      fn = get_indirect_callee_fndecl (id->dst_node, stmt);
      if (!fn)
        goto egress;
    }

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective C and fortran still call tree_rest_of_compilation directly.
     Kill this check once this is fixed.  */
  if (!id->dst_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->dst_node, stmt);

  /* Don't inline functions with different EH personalities.  */
  if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
      && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
          != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
    goto egress;

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      /* If this call was originally indirect, we do not want to emit any
         inlining related warnings or sorry messages because there are no
         guarantees regarding those.  */
      if (cg_edge->indirect_inlining_edge)
        goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* Avoid warnings during early inline pass.  */
          && cgraph_global_info_ready)
        {
          sorry ("inlining failed in call to %q+F: %s", fn,
                 cgraph_inline_failed_string (reason));
          sorry ("called from here");
        }
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && reason != CIF_UNSPECIFIED
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
               /* Avoid warnings during early inline pass.  */
               && cgraph_global_info_ready)
        {
          warning (OPT_Winline, "inlining failed in call to %q+F: %s",
                   fn, cgraph_inline_failed_string (reason));
          warning (OPT_Winline, "called from here");
        }
      goto egress;
    }
  fn = cg_edge->callee->decl;

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->dst_node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* We will be inlining this callee.  */
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);

  /* Update the caller's EH personality.  */
  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);

  /* Split the block holding the GIMPLE_CALL.  */
  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits after the statement; work around this by
     moving the call into the second block manually.  Not pretty,
     but seems easier than doing the CFG manipulation by hand
     when the GIMPLE_CALL is in the last statement of BB.  */
  stmt_gsi = gsi_last_bb (bb);
  gsi_remove (&stmt_gsi, false);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  if (gsi_end_p (gsi))
    {
      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = true;
    }
  else
    {
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = false;
    }

  stmt_gsi = gsi_start_bb (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  prepend_lexical_block (gimple_block (stmt), id->block);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = pointer_map_create ();
  dst = id->debug_map;
  id->debug_map = NULL;

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_node = cg_edge->callee;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->gimple_call = stmt;

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
                                                   NOT_TAKEN),
                        GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);

  if (DECL_INITIAL (fn))
    prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
         see that the variable to which the value is assigned was not
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
      if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;

      if (gimple_call_return_slot_opt_p (stmt))
        {
          return_slot = modify_dest;
          modify_dest = NULL;
        }
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug
     33407.  */
  if (DECL_IS_OPERATOR_NEW (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest);

  /* Add local vars in this inlined callee to caller.  */
  t_step = id->src_cfun->local_decls;
  for (; t_step; t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
        {
          if (var_ann (var) && add_referenced_var (var))
            cfun->local_decls = tree_cons (NULL_TREE, var,
                                           cfun->local_decls);
        }
      else if (!can_be_nonlocal (var, id))
        cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
                                       cfun->local_decls);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inlining ");
      print_generic_expr (dump_file, id->src_fn, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, id->dst_fn, 0);
      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count,
             cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
             bb, return_block);

  /* Reset the escaped solution.  */
  if (cfun->gimple_df)
    pt_solution_reset (&cfun->gimple_df->escaped);

  /* Clean up.  */
  if (id->debug_map)
    {
      pointer_map_destroy (id->debug_map);
      id->debug_map = dst;
    }
  pointer_map_destroy (id->decl_map);
  id->decl_map = st;

  /* Unlink the call's virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);

  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gsi_replace (&stmt_gsi, stmt, false);
      if (gimple_in_ssa_p (cfun))
        mark_symbols_for_renaming (stmt);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
    }
  else
    {
      /* Handle the case of inlining a function with no return
         statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
          && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
        {
          tree name = gimple_call_lhs (stmt);
          tree var = SSA_NAME_VAR (name);
          tree def = gimple_default_def (cfun, var);

          if (def)
            {
              /* If the variable is used undefined, make this name
                 undefined via a move.  */
              stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
              gsi_replace (&stmt_gsi, stmt, true);
            }
          else
            {
              /* Otherwise make this variable undefined.  */
              gsi_remove (&stmt_gsi, true);
              set_default_def (var, name);
              SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
            }
        }
      else
        gsi_remove (&stmt_gsi, true);
    }

  if (purge_dead_abnormal_edges)
    gimple_purge_dead_abnormal_call_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
                  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  id->block = NULL_TREE;
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
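
/* To summarize the CFG surgery performed above for a statement
   "a = foo (x)" in block BB: BB is split after the call, the call is
   moved to the head of the new return block, the callee body is
   copied in between by copy_body, and the call statement itself is
   finally replaced by "a = USE_RETVAR" (or simply removed when the
   result is unused).  */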

/* Expand call statements in basic block BB; return true if a call
   was inlined.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_call (stmt)
          && expand_call_inline (bb, stmt, id))
        return true;
    }

  return false;
}


/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, struct pointer_set_t *statements)
{
  for (; first < n_basic_blocks; first++)
    if (BASIC_BLOCK (first))
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (BASIC_BLOCK (first));
             !gsi_end_p (gsi);
             gsi_next (&gsi))
          if (pointer_set_contains (statements, gsi_stmt (gsi)))
            {
              gimple old_stmt = gsi_stmt (gsi);
              tree old_decl
                = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

              if (old_decl && DECL_BUILT_IN (old_decl))
                {
                  /* Folding builtins can create multiple instructions,
                     we need to look at all of them.  */
                  gimple_stmt_iterator i2 = gsi;
                  gsi_prev (&i2);
                  if (fold_stmt (&gsi))
                    {
                      gimple new_stmt;
                      if (gsi_end_p (i2))
                        i2 = gsi_start_bb (BASIC_BLOCK (first));
                      else
                        gsi_next (&i2);
                      while (1)
                        {
                          new_stmt = gsi_stmt (i2);
                          update_stmt (new_stmt);
                          cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                             new_stmt);

                          if (new_stmt == gsi_stmt (gsi))
                            {
                              /* It is okay to check only for the very last
                                 of these statements.  If it is a throwing
                                 statement nothing will change.  If it isn't,
                                 this can remove EH edges.  The only
                                 problematic case would be intermediate
                                 statements throwing while the last one does
                                 not; that would mean we'd have to split the
                                 block, which we can't do here and we'd lose
                                 anyway.  And as builtins probably never
                                 throw, this all is moot anyway.  */
                              if (maybe_clean_or_replace_eh_stmt (old_stmt,
                                                                  new_stmt))
                                gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                              break;
                            }
                          gsi_next (&i2);
                        }
                    }
                }
              else if (fold_stmt (&gsi))
                {
                  /* Re-read the statement from GSI as fold_stmt() may
                     have changed it.  */
                  gimple new_stmt = gsi_stmt (gsi);
                  update_stmt (new_stmt);

                  if (is_gimple_call (old_stmt)
                      || is_gimple_call (new_stmt))
                    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                       new_stmt);

                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
                    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                }
            }
      }
}

/* Return true if BB has at least one abnormal outgoing edge.  */

static inline bool
has_abnormal_outgoing_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}
4027 | ||
d4e4baa9 AO |
4028 | /* Expand calls to inline functions in the body of FN. */ |
4029 | ||
873aa8f5 | 4030 | unsigned int |
46c5ad27 | 4031 | optimize_inline_calls (tree fn) |
d4e4baa9 | 4032 | { |
1b369fae | 4033 | copy_body_data id; |
e21aff8a | 4034 | basic_block bb; |
b8a00a4d | 4035 | int last = n_basic_blocks; |
d406b663 JJ |
4036 | struct gimplify_ctx gctx; |
4037 | ||
c5b6f18e MM |
4038 | /* There is no point in performing inlining if errors have already |
4039 | occurred -- and we might crash if we try to inline invalid | |
4040 | code. */ | |
4041 | if (errorcount || sorrycount) | |
873aa8f5 | 4042 | return 0; |
c5b6f18e | 4043 | |
d4e4baa9 AO |
4044 | /* Clear out ID. */ |
4045 | memset (&id, 0, sizeof (id)); | |
4046 | ||
1b369fae RH |
4047 | id.src_node = id.dst_node = cgraph_node (fn); |
4048 | id.dst_fn = fn; | |
d4e4baa9 | 4049 | /* Or any functions that aren't finished yet. */ |
d4e4baa9 | 4050 | if (current_function_decl) |
0f900dfa | 4051 | id.dst_fn = current_function_decl; |
1b369fae RH |
4052 | |
4053 | id.copy_decl = copy_decl_maybe_to_var; | |
4054 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4055 | id.transform_new_cfg = false; | |
4056 | id.transform_return_to_modify = true; | |
9ff420f1 | 4057 | id.transform_lang_insert_block = NULL; |
b8a00a4d | 4058 | id.statements_to_fold = pointer_set_create (); |
1b369fae | 4059 | |
d406b663 | 4060 | push_gimplify_context (&gctx); |
d4e4baa9 | 4061 | |
672987e8 ZD |
4062 | /* We make no attempts to keep dominance info up-to-date. */ |
4063 | free_dominance_info (CDI_DOMINATORS); | |
4064 | free_dominance_info (CDI_POST_DOMINATORS); | |
4065 | ||
726a989a RB |
4066 | /* Register specific gimple functions. */ |
4067 | gimple_register_cfg_hooks (); | |
4068 | ||
e21aff8a SB |
4069 | /* Reach the trees by walking over the CFG, and note the |
4070 | enclosing basic-blocks in the call edges. */ | |
4071 | /* We walk the blocks going forward, because inlined function bodies | |
4072 | will split id->current_basic_block, and the new blocks will | |
4073 | follow it; we'll trudge through them, processing their CALL_EXPRs | |
4074 | along the way. */ | |
4075 | FOR_EACH_BB (bb) | |
4076 | gimple_expand_calls_inline (bb, &id); | |
d4e4baa9 | 4077 | |
e21aff8a | 4078 | pop_gimplify_context (NULL); |
6de9cd9a | 4079 | |
18c6ada9 JH |
4080 | #ifdef ENABLE_CHECKING |
4081 | { | |
4082 | struct cgraph_edge *e; | |
4083 | ||
1b369fae | 4084 | verify_cgraph_node (id.dst_node); |
18c6ada9 JH |
4085 | |
4086 | /* Double check that we inlined everything we are supposed to inline. */ | |
1b369fae | 4087 | for (e = id.dst_node->callees; e; e = e->next_callee) |
1e128c5f | 4088 | gcc_assert (e->inline_failed); |
18c6ada9 JH |
4089 | } |
4090 | #endif | |
b8698a0f | 4091 | |
a9eafe81 AP |
4092 | /* Fold the statements before compacting/renumbering the basic blocks. */ |
4093 | fold_marked_statements (last, id.statements_to_fold); | |
4094 | pointer_set_destroy (id.statements_to_fold); | |
b8698a0f | 4095 | |
b5b8b0ac AO |
4096 | gcc_assert (!id.debug_stmts); |
4097 | ||
a9eafe81 AP |
4098 | /* Renumber the (code) basic_blocks consecutively. */ |
4099 | compact_blocks (); | |
4100 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ | |
4101 | number_blocks (fn); | |
b8a00a4d | 4102 | |
873aa8f5 | 4103 | fold_cond_expr_cond (); |
078c3644 JH |
4104 | delete_unreachable_blocks_update_callgraph (&id); |
4105 | #ifdef ENABLE_CHECKING | |
4106 | verify_cgraph_node (id.dst_node); | |
4107 | #endif | |
726a989a | 4108 | |
110cfe1c JH |
4109 | /* It would be nice to check SSA/CFG/statement consistency here, but it is | |
4110 | not possible yet; the IPA passes might make various functions not throw, | |
4111 | and they don't care to proactively update local EH info. This is done | |
4112 | later in the fixup_cfg pass, which also executes the verification. */ | |
726a989a RB |
4113 | return (TODO_update_ssa |
4114 | | TODO_cleanup_cfg | |
45a80bb9 JH |
4115 | | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0) |
4116 | | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0)); | |
d4e4baa9 AO |
4117 | } |
4118 | ||
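/* Illustrative sketch, not part of this file: how a per-function
   transform in the style of inline_transform in ipa-inline.c might
   drive optimize_inline_calls and hand the returned TODO flags back to
   the pass manager.  The wrapper name is hypothetical.  */
#if 0
static unsigned int
example_inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;

  /* Expand the calls that the inline heuristics decided to inline;
     the returned flags say what must be recomputed afterwards.  */
  if (node->callees)
    todo = optimize_inline_calls (current_function_decl);

  return todo;
}
#endif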
d4e4baa9 AO |
4119 | /* Passed to walk_tree. Copies the node pointed to, if appropriate. */ |
4120 | ||
4121 | tree | |
46c5ad27 | 4122 | copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
d4e4baa9 AO |
4123 | { |
4124 | enum tree_code code = TREE_CODE (*tp); | |
07beea0d | 4125 | enum tree_code_class cl = TREE_CODE_CLASS (code); |
d4e4baa9 AO |
4126 | |
4127 | /* We make copies of most nodes. */ | |
07beea0d | 4128 | if (IS_EXPR_CODE_CLASS (cl) |
d4e4baa9 AO |
4129 | || code == TREE_LIST |
4130 | || code == TREE_VEC | |
8843c120 DN |
4131 | || code == TYPE_DECL |
4132 | || code == OMP_CLAUSE) | |
d4e4baa9 AO |
4133 | { |
4134 | /* Because the chain gets clobbered when we make a copy, we save it | |
4135 | here. */ | |
82d6e6fc | 4136 | tree chain = NULL_TREE, new_tree; |
07beea0d | 4137 | |
726a989a | 4138 | chain = TREE_CHAIN (*tp); |
d4e4baa9 AO |
4139 | |
4140 | /* Copy the node. */ | |
82d6e6fc | 4141 | new_tree = copy_node (*tp); |
6de9cd9a DN |
4142 | |
4143 | /* Propagate mudflap marked-ness. */ | |
4144 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 4145 | mf_mark (new_tree); |
6de9cd9a | 4146 | |
82d6e6fc | 4147 | *tp = new_tree; |
d4e4baa9 AO |
4148 | |
4149 | /* Now, restore the chain, if appropriate. That will cause | |
4150 | walk_tree to walk into the chain as well. */ | |
50674e96 DN |
4151 | if (code == PARM_DECL |
4152 | || code == TREE_LIST | |
aaf46ef9 | 4153 | || code == OMP_CLAUSE) |
d4e4baa9 AO |
4154 | TREE_CHAIN (*tp) = chain; |
4155 | ||
4156 | /* For now, we don't update BLOCKs when we make copies. So, we | |
6de9cd9a DN |
4157 | have to nullify all BIND_EXPRs. */ |
4158 | if (TREE_CODE (*tp) == BIND_EXPR) | |
4159 | BIND_EXPR_BLOCK (*tp) = NULL_TREE; | |
d4e4baa9 | 4160 | } |
4038c495 GB |
4161 | else if (code == CONSTRUCTOR) |
4162 | { | |
4163 | /* CONSTRUCTOR nodes need special handling because | |
4164 | we need to duplicate the vector of elements. */ | |
82d6e6fc | 4165 | tree new_tree; |
4038c495 | 4166 | |
82d6e6fc | 4167 | new_tree = copy_node (*tp); |
4038c495 GB |
4168 | |
4169 | /* Propagate mudflap marked-ness. */ | |
4170 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 4171 | mf_mark (new_tree); |
9f63daea | 4172 | |
82d6e6fc | 4173 | CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc, |
4038c495 | 4174 | CONSTRUCTOR_ELTS (*tp)); |
82d6e6fc | 4175 | *tp = new_tree; |
4038c495 | 4176 | } |
6615c446 | 4177 | else if (TREE_CODE_CLASS (code) == tcc_type) |
d4e4baa9 | 4178 | *walk_subtrees = 0; |
6615c446 | 4179 | else if (TREE_CODE_CLASS (code) == tcc_declaration) |
6de9cd9a | 4180 | *walk_subtrees = 0; |
a396f8ae GK |
4181 | else if (TREE_CODE_CLASS (code) == tcc_constant) |
4182 | *walk_subtrees = 0; | |
1e128c5f GB |
4183 | else |
4184 | gcc_assert (code != STATEMENT_LIST); | |
d4e4baa9 AO |
4185 | return NULL_TREE; |
4186 | } | |
4187 | ||
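/* Illustrative sketch, not part of this file: copy_tree_r is meant to be
   used as a walk_tree callback.  Passing a NULL pset makes walk_tree
   visit shared subtrees repeatedly, so every expression node reachable
   from T is replaced by a fresh copy.  The helper name is hypothetical.  */
#if 0
static tree
example_deep_copy_expr (tree t)
{
  walk_tree (&t, copy_tree_r, NULL, NULL);
  return t;
}
#endif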
4188 | /* The SAVE_EXPR pointed to by TP is being copied. If ST contains | |
aa4a53af | 4189 | information indicating to what new SAVE_EXPR this one should be mapped, |
e21aff8a SB |
4190 | use that one. Otherwise, create a new node and enter it in ST so that | |
4191 | later references to this SAVE_EXPR find the same copy. */ | |
d4e4baa9 | 4192 | |
892c7e1e | 4193 | static void |
82c82743 | 4194 | remap_save_expr (tree *tp, void *st_, int *walk_subtrees) |
d4e4baa9 | 4195 | { |
6be42dd4 RG |
4196 | struct pointer_map_t *st = (struct pointer_map_t *) st_; |
4197 | tree *n; | |
5e20bdd7 | 4198 | tree t; |
d4e4baa9 AO |
4199 | |
4200 | /* See if we already encountered this SAVE_EXPR. */ | |
6be42dd4 | 4201 | n = (tree *) pointer_map_contains (st, *tp); |
d92b4486 | 4202 | |
d4e4baa9 AO |
4203 | /* If we didn't already remap this SAVE_EXPR, do so now. */ |
4204 | if (!n) | |
4205 | { | |
5e20bdd7 | 4206 | t = copy_node (*tp); |
d4e4baa9 | 4207 | |
d4e4baa9 | 4208 | /* Remember this SAVE_EXPR. */ |
6be42dd4 | 4209 | *pointer_map_insert (st, *tp) = t; |
350ebd54 | 4210 | /* Make sure we don't remap an already-remapped SAVE_EXPR. */ |
6be42dd4 | 4211 | *pointer_map_insert (st, t) = t; |
d4e4baa9 AO |
4212 | } |
4213 | else | |
5e20bdd7 JZ |
4214 | { |
4215 | /* We've already walked into this SAVE_EXPR; don't do it again. */ | |
4216 | *walk_subtrees = 0; | |
6be42dd4 | 4217 | t = *n; |
5e20bdd7 | 4218 | } |
d4e4baa9 AO |
4219 | |
4220 | /* Replace this SAVE_EXPR with the copy. */ | |
5e20bdd7 | 4221 | *tp = t; |
d4e4baa9 | 4222 | } |
d436bff8 | 4223 | |
aa4a53af RK |
4224 | /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label, | |
4225 | copies the declaration and enters it in the decl map in DATA (which is | |
1b369fae | 4226 | really a `copy_body_data *'). */ | |
6de9cd9a DN |
4227 | |
4228 | static tree | |
4229 | mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
4230 | void *data) | |
4231 | { | |
1b369fae | 4232 | copy_body_data *id = (copy_body_data *) data; |
6de9cd9a DN |
4233 | |
4234 | /* Don't walk into types. */ | |
350fae66 RK |
4235 | if (TYPE_P (*tp)) |
4236 | *walk_subtrees = 0; | |
6de9cd9a | 4237 | |
350fae66 | 4238 | else if (TREE_CODE (*tp) == LABEL_EXPR) |
6de9cd9a | 4239 | { |
350fae66 | 4240 | tree decl = TREE_OPERAND (*tp, 0); |
6de9cd9a | 4241 | |
350fae66 | 4242 | /* Copy the decl and remember the copy. */ |
1b369fae | 4243 | insert_decl_map (id, decl, id->copy_decl (decl, id)); |
6de9cd9a DN |
4244 | } |
4245 | ||
4246 | return NULL_TREE; | |
4247 | } | |
4248 | ||
19114537 EC |
4249 | /* Perform any modifications to EXPR required when it is unsaved. Does |
4250 | not recurse into EXPR's subtrees. */ | |
4251 | ||
4252 | static void | |
4253 | unsave_expr_1 (tree expr) | |
4254 | { | |
4255 | switch (TREE_CODE (expr)) | |
4256 | { | |
4257 | case TARGET_EXPR: | |
4258 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
4259 | It's OK for this to happen if it was part of a subtree that | |
4260 | isn't immediately expanded, such as operand 2 of another | |
4261 | TARGET_EXPR. */ | |
4262 | if (TREE_OPERAND (expr, 1)) | |
4263 | break; | |
4264 | ||
4265 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
4266 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
4267 | break; | |
4268 | ||
4269 | default: | |
4270 | break; | |
4271 | } | |
4272 | } | |
4273 | ||
6de9cd9a DN |
4274 | /* Called via walk_tree when an expression is unsaved. Using the |
4275 | map pointed to by ST (which is really a `pointer_map_t *'), | |
4276 | remaps all local declarations to appropriate replacements. */ | |
d436bff8 AH |
4277 | |
4278 | static tree | |
6de9cd9a | 4279 | unsave_r (tree *tp, int *walk_subtrees, void *data) |
d436bff8 | 4280 | { |
1b369fae | 4281 | copy_body_data *id = (copy_body_data *) data; |
6be42dd4 RG |
4282 | struct pointer_map_t *st = id->decl_map; |
4283 | tree *n; | |
6de9cd9a DN |
4284 | |
4285 | /* Only a local declaration (variable or label). */ | |
4286 | if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp)) | |
4287 | || TREE_CODE (*tp) == LABEL_DECL) | |
4288 | { | |
4289 | /* Lookup the declaration. */ | |
6be42dd4 | 4290 | n = (tree *) pointer_map_contains (st, *tp); |
9f63daea | 4291 | |
6de9cd9a DN |
4292 | /* If it's there, remap it. */ |
4293 | if (n) | |
6be42dd4 | 4294 | *tp = *n; |
6de9cd9a | 4295 | } |
aa4a53af | 4296 | |
6de9cd9a | 4297 | else if (TREE_CODE (*tp) == STATEMENT_LIST) |
726a989a | 4298 | gcc_unreachable (); |
6de9cd9a DN |
4299 | else if (TREE_CODE (*tp) == BIND_EXPR) |
4300 | copy_bind_expr (tp, walk_subtrees, id); | |
a406865a RG |
4301 | else if (TREE_CODE (*tp) == SAVE_EXPR |
4302 | || TREE_CODE (*tp) == TARGET_EXPR) | |
82c82743 | 4303 | remap_save_expr (tp, st, walk_subtrees); |
d436bff8 | 4304 | else |
6de9cd9a DN |
4305 | { |
4306 | copy_tree_r (tp, walk_subtrees, NULL); | |
4307 | ||
4308 | /* Do whatever unsaving is required. */ | |
4309 | unsave_expr_1 (*tp); | |
4310 | } | |
4311 | ||
4312 | /* Keep iterating. */ | |
4313 | return NULL_TREE; | |
d436bff8 AH |
4314 | } |
4315 | ||
19114537 EC |
4316 | /* Copies everything in EXPR and replaces variables, labels |
4317 | and SAVE_EXPRs local to EXPR. */ | |
6de9cd9a DN |
4318 | |
4319 | tree | |
19114537 | 4320 | unsave_expr_now (tree expr) |
6de9cd9a | 4321 | { |
1b369fae | 4322 | copy_body_data id; |
6de9cd9a DN |
4323 | |
4324 | /* There's nothing to do for NULL_TREE. */ | |
4325 | if (expr == 0) | |
4326 | return expr; | |
4327 | ||
4328 | /* Set up ID. */ | |
4329 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
4330 | id.src_fn = current_function_decl; |
4331 | id.dst_fn = current_function_decl; | |
6be42dd4 | 4332 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 4333 | id.debug_map = NULL; |
6de9cd9a | 4334 | |
1b369fae RH |
4335 | id.copy_decl = copy_decl_no_change; |
4336 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4337 | id.transform_new_cfg = false; | |
4338 | id.transform_return_to_modify = false; | |
9ff420f1 | 4339 | id.transform_lang_insert_block = NULL; |
1b369fae | 4340 | |
6de9cd9a DN |
4341 | /* Walk the tree once to find local labels. */ |
4342 | walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id); | |
4343 | ||
4344 | /* Walk the tree again, copying, remapping, and unsaving. */ | |
4345 | walk_tree (&expr, unsave_r, &id, NULL); | |
4346 | ||
4347 | /* Clean up. */ | |
6be42dd4 | 4348 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
4349 | if (id.debug_map) |
4350 | pointer_map_destroy (id.debug_map); | |
6de9cd9a DN |
4351 | |
4352 | return expr; | |
4353 | } | |
4354 | ||
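/* Illustrative sketch, not part of this file: a front end that needs an
   independent copy of a GENERIC expression (one that shares no
   SAVE_EXPRs or local labels with the original) can obtain one this
   way.  The helper name is hypothetical.  */
#if 0
static tree
example_duplicate_expr (tree expr)
{
  return unsave_expr_now (expr);
}
#endif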
726a989a RB |
4355 | /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local |
4356 | label, copies the declaration and enters it in the decl map in DATA (which | |
4357 | is really a `copy_body_data *'). */ | |
4358 | ||
4359 | static tree | |
4360 | mark_local_labels_stmt (gimple_stmt_iterator *gsip, | |
4361 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4362 | struct walk_stmt_info *wi) | |
4363 | { | |
4364 | copy_body_data *id = (copy_body_data *) wi->info; | |
4365 | gimple stmt = gsi_stmt (*gsip); | |
4366 | ||
4367 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
4368 | { | |
4369 | tree decl = gimple_label_label (stmt); | |
4370 | ||
4371 | /* Copy the decl and remember the copy. */ | |
4372 | insert_decl_map (id, decl, id->copy_decl (decl, id)); | |
4373 | } | |
4374 | ||
4375 | return NULL_TREE; | |
4376 | } | |
4377 | ||
4378 | ||
4379 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals. | |
4380 | Using the map pointed to by ST (which is really a `pointer_map_t *'), | |
4381 | remaps all local declarations to appropriate replacements in gimple | |
4382 | operands. */ | |
4383 | ||
4384 | static tree | |
4385 | replace_locals_op (tree *tp, int *walk_subtrees, void *data) | |
4386 | { | |
4387 | struct walk_stmt_info *wi = (struct walk_stmt_info*) data; | |
4388 | copy_body_data *id = (copy_body_data *) wi->info; | |
4389 | struct pointer_map_t *st = id->decl_map; | |
4390 | tree *n; | |
4391 | tree expr = *tp; | |
4392 | ||
4393 | /* Only a local declaration (variable or label). */ | |
4394 | if ((TREE_CODE (expr) == VAR_DECL | |
4395 | && !TREE_STATIC (expr)) | |
4396 | || TREE_CODE (expr) == LABEL_DECL) | |
4397 | { | |
4398 | /* Lookup the declaration. */ | |
4399 | n = (tree *) pointer_map_contains (st, expr); | |
4400 | ||
4401 | /* If it's there, remap it. */ | |
4402 | if (n) | |
4403 | *tp = *n; | |
4404 | *walk_subtrees = 0; | |
4405 | } | |
4406 | else if (TREE_CODE (expr) == STATEMENT_LIST | |
4407 | || TREE_CODE (expr) == BIND_EXPR | |
4408 | || TREE_CODE (expr) == SAVE_EXPR) | |
4409 | gcc_unreachable (); | |
4410 | else if (TREE_CODE (expr) == TARGET_EXPR) | |
4411 | { | |
4412 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
4413 | It's OK for this to happen if it was part of a subtree that | |
4414 | isn't immediately expanded, such as operand 2 of another | |
4415 | TARGET_EXPR. */ | |
4416 | if (!TREE_OPERAND (expr, 1)) | |
4417 | { | |
4418 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
4419 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
4420 | } | |
4421 | } | |
4422 | ||
4423 | /* Keep iterating. */ | |
4424 | return NULL_TREE; | |
4425 | } | |
4426 | ||
4427 | ||
4428 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals. | |
4429 | Using the map pointed to by ST (which is really a `pointer_map_t *'), | |
4430 | remaps all local declarations to appropriate replacements in gimple | |
4431 | statements. */ | |
4432 | ||
4433 | static tree | |
4434 | replace_locals_stmt (gimple_stmt_iterator *gsip, | |
4435 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4436 | struct walk_stmt_info *wi) | |
4437 | { | |
4438 | copy_body_data *id = (copy_body_data *) wi->info; | |
4439 | gimple stmt = gsi_stmt (*gsip); | |
4440 | ||
4441 | if (gimple_code (stmt) == GIMPLE_BIND) | |
4442 | { | |
4443 | tree block = gimple_bind_block (stmt); | |
4444 | ||
4445 | if (block) | |
4446 | { | |
4447 | remap_block (&block, id); | |
4448 | gimple_bind_set_block (stmt, block); | |
4449 | } | |
4450 | ||
4451 | /* This will remap a lot of the same decls again, but this should be | |
4452 | harmless. */ | |
4453 | if (gimple_bind_vars (stmt)) | |
526d73ab | 4454 | gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id)); |
726a989a RB |
4455 | } |
4456 | ||
4457 | /* Keep iterating. */ | |
4458 | return NULL_TREE; | |
4459 | } | |
4460 | ||
4461 | ||
4462 | /* Copies everything in SEQ and replaces variables and labels local to | |
4463 | current_function_decl. */ | |
4464 | ||
4465 | gimple_seq | |
4466 | copy_gimple_seq_and_replace_locals (gimple_seq seq) | |
4467 | { | |
4468 | copy_body_data id; | |
4469 | struct walk_stmt_info wi; | |
4470 | struct pointer_set_t *visited; | |
4471 | gimple_seq copy; | |
4472 | ||
4473 | /* There's nothing to do for an empty sequence. */ | |
4474 | if (seq == NULL) | |
4475 | return seq; | |
4476 | ||
4477 | /* Set up ID. */ | |
4478 | memset (&id, 0, sizeof (id)); | |
4479 | id.src_fn = current_function_decl; | |
4480 | id.dst_fn = current_function_decl; | |
4481 | id.decl_map = pointer_map_create (); | |
b5b8b0ac | 4482 | id.debug_map = NULL; |
726a989a RB |
4483 | |
4484 | id.copy_decl = copy_decl_no_change; | |
4485 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4486 | id.transform_new_cfg = false; | |
4487 | id.transform_return_to_modify = false; | |
4488 | id.transform_lang_insert_block = NULL; | |
4489 | ||
4490 | /* Walk the tree once to find local labels. */ | |
4491 | memset (&wi, 0, sizeof (wi)); | |
4492 | visited = pointer_set_create (); | |
4493 | wi.info = &id; | |
4494 | wi.pset = visited; | |
4495 | walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi); | |
4496 | pointer_set_destroy (visited); | |
4497 | ||
4498 | copy = gimple_seq_copy (seq); | |
4499 | ||
4500 | /* Walk the copy, remapping decls. */ | |
4501 | memset (&wi, 0, sizeof (wi)); | |
4502 | wi.info = &id; | |
4503 | walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi); | |
4504 | ||
4505 | /* Clean up. */ | |
4506 | pointer_map_destroy (id.decl_map); | |
b5b8b0ac AO |
4507 | if (id.debug_map) |
4508 | pointer_map_destroy (id.debug_map); | |
726a989a RB |
4509 | |
4510 | return copy; | |
4511 | } | |
4512 | ||
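/* Illustrative sketch, not part of this file: duplicating a GIMPLE
   sequence so that a second copy can be inserted elsewhere, much like
   the EH lowering code duplicates finally blocks.  The helper name is
   hypothetical.  */
#if 0
static void
example_insert_copy_after (gimple_stmt_iterator *gsi, gimple_seq seq)
{
  gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);
  gsi_insert_seq_after (gsi, copy, GSI_CONTINUE_LINKING);
}
#endif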
4513 | ||
6de9cd9a | 4514 | /* Allow someone to determine if SEARCH is a child of TOP from gdb. */ |
aa4a53af | 4515 | |
6de9cd9a DN |
4516 | static tree |
4517 | debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data) | |
4518 | { | |
4519 | if (*tp == data) | |
4520 | return (tree) data; | |
4521 | else | |
4522 | return NULL; | |
4523 | } | |
4524 | ||
6de9cd9a DN |
4525 | bool |
4526 | debug_find_tree (tree top, tree search) | |
4527 | { | |
4528 | return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0; | |
4529 | } | |
4530 | ||
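/* Illustrative usage, not part of this file: from a debugger one can ask
   whether SEARCH occurs somewhere below TOP, e.g.

     (gdb) call debug_find_tree (expr, suspect)

   which returns true (1) when SUSPECT is reachable from EXPR.  */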
e21aff8a | 4531 | |
6de9cd9a DN |
4532 | /* Declare the variables created by the inliner. Add all the variables in |
4533 | VARS to the BLOCK. */ | |
4534 | ||
4535 | static void | |
e21aff8a | 4536 | declare_inline_vars (tree block, tree vars) |
6de9cd9a | 4537 | { |
84936f6f RH |
4538 | tree t; |
4539 | for (t = vars; t; t = TREE_CHAIN (t)) | |
9659ce8b JH |
4540 | { |
4541 | DECL_SEEN_IN_BIND_EXPR_P (t) = 1; | |
4542 | gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t)); | |
cb91fab0 | 4543 | cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls); |
9659ce8b | 4544 | } |
6de9cd9a | 4545 | |
e21aff8a SB |
4546 | if (block) |
4547 | BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars); | |
4548 | } | |
4549 | ||
19734dd8 | 4550 | /* Finish up COPY, a copy of DECL. The DECL originally was in ID->src_fn, | |
1b369fae RH |
4551 | but now it will live in ID->dst_fn. Common bookkeeping for the | |
4552 | copy_decl_* callbacks below. */ | |
19734dd8 | 4553 | |
1b369fae RH |
4554 | static tree |
4555 | copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy) | |
19734dd8 | 4556 | { |
19734dd8 RL |
4557 | /* Don't generate debug information for the copy if we wouldn't have |
4558 | generated it for the original. */ | |
4559 | DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl); | |
4560 | DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl); | |
4561 | ||
4562 | /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what | |
b8698a0f | 4563 | declaration inspired this copy. */ |
19734dd8 RL |
4564 | DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl); |
4565 | ||
4566 | /* The new variable/label has no RTL, yet. */ | |
68a976f2 RL |
4567 | if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL) |
4568 | && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy)) | |
19734dd8 | 4569 | SET_DECL_RTL (copy, NULL_RTX); |
b8698a0f | 4570 | |
19734dd8 RL |
4571 | /* These args would always appear unused, if not for this. */ |
4572 | TREE_USED (copy) = 1; | |
4573 | ||
4574 | /* Set the context for the new declaration. */ | |
4575 | if (!DECL_CONTEXT (decl)) | |
4576 | /* Globals stay global. */ | |
4577 | ; | |
1b369fae | 4578 | else if (DECL_CONTEXT (decl) != id->src_fn) |
19734dd8 RL |
4579 | /* Things that weren't in the scope of the function we're inlining |
4580 | from aren't in the scope we're inlining to, either. */ | |
4581 | ; | |
4582 | else if (TREE_STATIC (decl)) | |
4583 | /* Function-scoped static variables should stay in the original | |
4584 | function. */ | |
4585 | ; | |
4586 | else | |
4587 | /* Ordinary automatic local variables are now in the scope of the | |
4588 | new function. */ | |
1b369fae | 4589 | DECL_CONTEXT (copy) = id->dst_fn; |
19734dd8 RL |
4590 | |
4591 | return copy; | |
4592 | } | |
4593 | ||
1b369fae RH |
4594 | static tree |
4595 | copy_decl_to_var (tree decl, copy_body_data *id) | |
4596 | { | |
4597 | tree copy, type; | |
4598 | ||
4599 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4600 | || TREE_CODE (decl) == RESULT_DECL); | |
4601 | ||
4602 | type = TREE_TYPE (decl); | |
4603 | ||
c2255bc4 AH |
4604 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4605 | VAR_DECL, DECL_NAME (decl), type); | |
25a6a873 RG |
4606 | if (DECL_PT_UID_SET_P (decl)) |
4607 | SET_DECL_PT_UID (copy, DECL_PT_UID (decl)); | |
1b369fae RH |
4608 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); |
4609 | TREE_READONLY (copy) = TREE_READONLY (decl); | |
4610 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
0890b981 | 4611 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
1b369fae RH |
4612 | |
4613 | return copy_decl_for_dup_finish (id, decl, copy); | |
4614 | } | |
4615 | ||
c08cd4c1 JM |
4616 | /* Like copy_decl_to_var, but create a return slot object instead of a |
4617 | pointer variable for return by invisible reference. */ | |
4618 | ||
4619 | static tree | |
4620 | copy_result_decl_to_var (tree decl, copy_body_data *id) | |
4621 | { | |
4622 | tree copy, type; | |
4623 | ||
4624 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4625 | || TREE_CODE (decl) == RESULT_DECL); | |
4626 | ||
4627 | type = TREE_TYPE (decl); | |
4628 | if (DECL_BY_REFERENCE (decl)) | |
4629 | type = TREE_TYPE (type); | |
4630 | ||
c2255bc4 AH |
4631 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4632 | VAR_DECL, DECL_NAME (decl), type); | |
25a6a873 RG |
4633 | if (DECL_PT_UID_SET_P (decl)) |
4634 | SET_DECL_PT_UID (copy, DECL_PT_UID (decl)); | |
c08cd4c1 JM |
4635 | TREE_READONLY (copy) = TREE_READONLY (decl); |
4636 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
4637 | if (!DECL_BY_REFERENCE (decl)) | |
4638 | { | |
4639 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); | |
0890b981 | 4640 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
c08cd4c1 JM |
4641 | } |
4642 | ||
4643 | return copy_decl_for_dup_finish (id, decl, copy); | |
4644 | } | |
4645 | ||
9ff420f1 | 4646 | tree |
1b369fae RH |
4647 | copy_decl_no_change (tree decl, copy_body_data *id) |
4648 | { | |
4649 | tree copy; | |
4650 | ||
4651 | copy = copy_node (decl); | |
4652 | ||
4653 | /* The COPY is not abstract; it will be generated in DST_FN. */ | |
4654 | DECL_ABSTRACT (copy) = 0; | |
4655 | lang_hooks.dup_lang_specific_decl (copy); | |
4656 | ||
4657 | /* TREE_ADDRESSABLE isn't used to indicate that a label's address has | |
4658 | been taken; it's for internal bookkeeping in expand_goto_internal. */ | |
4659 | if (TREE_CODE (copy) == LABEL_DECL) | |
4660 | { | |
4661 | TREE_ADDRESSABLE (copy) = 0; | |
4662 | LABEL_DECL_UID (copy) = -1; | |
4663 | } | |
4664 | ||
4665 | return copy_decl_for_dup_finish (id, decl, copy); | |
4666 | } | |
4667 | ||
4668 | static tree | |
4669 | copy_decl_maybe_to_var (tree decl, copy_body_data *id) | |
4670 | { | |
4671 | if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL) | |
4672 | return copy_decl_to_var (decl, id); | |
4673 | else | |
4674 | return copy_decl_no_change (decl, id); | |
4675 | } | |
4676 | ||
19734dd8 RL |
4677 | /* Return a copy of the function's argument tree. */ |
4678 | static tree | |
c6f7cfc1 JH |
4679 | copy_arguments_for_versioning (tree orig_parm, copy_body_data * id, |
4680 | bitmap args_to_skip, tree *vars) | |
19734dd8 | 4681 | { |
c6f7cfc1 JH |
4682 | tree arg, *parg; |
4683 | tree new_parm = NULL; | |
4684 | int i = 0; | |
19734dd8 | 4685 | |
c6f7cfc1 JH |
4686 | parg = &new_parm; |
4687 | ||
4688 | for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++) | |
4689 | if (!args_to_skip || !bitmap_bit_p (args_to_skip, i)) | |
4690 | { | |
4691 | tree new_tree = remap_decl (arg, id); | |
4692 | lang_hooks.dup_lang_specific_decl (new_tree); | |
4693 | *parg = new_tree; | |
4694 | parg = &TREE_CHAIN (new_tree); | |
4695 | } | |
eb50f5f4 | 4696 | else if (!pointer_map_contains (id->decl_map, arg)) |
c6f7cfc1 JH |
4697 | { |
4698 | /* Make an equivalent VAR_DECL. If the argument was used | |
4699 | as a temporary variable later in the function, the uses will be | |
4700 | replaced by the local variable. */ | |
4701 | tree var = copy_decl_to_var (arg, id); | |
4702 | get_var_ann (var); | |
4703 | add_referenced_var (var); | |
4704 | insert_decl_map (id, arg, var); | |
4705 | /* Declare this new variable. */ | |
4706 | TREE_CHAIN (var) = *vars; | |
4707 | *vars = var; | |
4708 | } | |
4709 | return new_parm; | |
19734dd8 RL |
4710 | } |
4711 | ||
4712 | /* Return a copy of the function's static chain. */ | |
4713 | static tree | |
1b369fae | 4714 | copy_static_chain (tree static_chain, copy_body_data * id) |
19734dd8 RL |
4715 | { |
4716 | tree *chain_copy, *pvar; | |
4717 | ||
4718 | chain_copy = &static_chain; | |
4719 | for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar)) | |
4720 | { | |
82d6e6fc KG |
4721 | tree new_tree = remap_decl (*pvar, id); |
4722 | lang_hooks.dup_lang_specific_decl (new_tree); | |
4723 | TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar); | |
4724 | *pvar = new_tree; | |
19734dd8 RL |
4725 | } |
4726 | return static_chain; | |
4727 | } | |
4728 | ||
4729 | /* Return true if the function is allowed to be versioned. | |
4730 | This is a guard for the versioning functionality. */ | |
27dbd3ac | 4731 | |
19734dd8 RL |
4732 | bool |
4733 | tree_versionable_function_p (tree fndecl) | |
4734 | { | |
86631ea3 MJ |
4735 | return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl)) |
4736 | && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL); | |
19734dd8 RL |
4737 | } |
4738 | ||
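/* Illustrative sketch, not part of this file: cloning machinery is
   expected to test versionability before building a new version.  The
   helper name is hypothetical.  */
#if 0
static bool
example_can_clone_p (struct cgraph_node *node)
{
  /* False e.g. for functions carrying the "noclone" attribute.  */
  return tree_versionable_function_p (node->decl);
}
#endif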
9187e02d JH |
4739 | /* Delete all unreachable basic blocks and update callgraph. |
4740 | Doing so is somewhat nontrivial because we need to update all clones and | |
4741 | remove inline functions that become unreachable. */ | |
9f5e9983 | 4742 | |
9187e02d JH |
4743 | static bool |
4744 | delete_unreachable_blocks_update_callgraph (copy_body_data *id) | |
9f5e9983 | 4745 | { |
9187e02d JH |
4746 | bool changed = false; |
4747 | basic_block b, next_bb; | |
4748 | ||
4749 | find_unreachable_blocks (); | |
4750 | ||
4751 | /* Delete all unreachable basic blocks. */ | |
4752 | ||
4753 | for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb) | |
4754 | { | |
4755 | next_bb = b->next_bb; | |
4756 | ||
4757 | if (!(b->flags & BB_REACHABLE)) | |
4758 | { | |
4759 | gimple_stmt_iterator bsi; | |
4760 | ||
4761 | for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi)) | |
4762 | if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL) | |
4763 | { | |
4764 | struct cgraph_edge *e; | |
4765 | struct cgraph_node *node; | |
4766 | ||
4767 | if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL) | |
4768 | { | |
4769 | if (!e->inline_failed) | |
4770 | cgraph_remove_node_and_inline_clones (e->callee); | |
4771 | else | |
4772 | cgraph_remove_edge (e); | |
4773 | } | |
4774 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES | |
4775 | && id->dst_node->clones) | |
4776 | for (node = id->dst_node->clones; node != id->dst_node;) | |
4777 | { | |
4778 | if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL) | |
4779 | { | |
4780 | if (!e->inline_failed) | |
4781 | cgraph_remove_node_and_inline_clones (e->callee); | |
4782 | else | |
4783 | cgraph_remove_edge (e); | |
4784 | } | |
b8698a0f | 4785 | |
9187e02d JH |
4786 | if (node->clones) |
4787 | node = node->clones; | |
4788 | else if (node->next_sibling_clone) | |
4789 | node = node->next_sibling_clone; | |
4790 | else | |
4791 | { | |
4792 | while (node != id->dst_node && !node->next_sibling_clone) | |
4793 | node = node->clone_of; | |
4794 | if (node != id->dst_node) | |
4795 | node = node->next_sibling_clone; | |
4796 | } | |
4797 | } | |
4798 | } | |
4799 | delete_basic_block (b); | |
4800 | changed = true; | |
4801 | } | |
4802 | } | |
4803 | ||
4804 | if (changed) | |
4805 | tidy_fallthru_edges (); | |
9187e02d | 4806 | return changed; |
9f5e9983 JJ |
4807 | } |
4808 | ||
08ad1d6d JH |
4809 | /* Update clone info after duplication. */ |
4810 | ||
4811 | static void | |
4812 | update_clone_info (copy_body_data * id) | |
4813 | { | |
4814 | struct cgraph_node *node; | |
4815 | if (!id->dst_node->clones) | |
4816 | return; | |
4817 | for (node = id->dst_node->clones; node != id->dst_node;) | |
4818 | { | |
4819 | /* First update replace maps to match the new body. */ | |
4820 | if (node->clone.tree_map) | |
4821 | { | |
4822 | unsigned int i; | |
4823 | for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++) | |
4824 | { | |
4825 | struct ipa_replace_map *replace_info; | |
4826 | replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i); | |
4827 | walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL); | |
4828 | walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL); | |
4829 | } | |
4830 | } | |
4831 | if (node->clones) | |
4832 | node = node->clones; | |
4833 | else if (node->next_sibling_clone) | |
4834 | node = node->next_sibling_clone; | |
4835 | else | |
4836 | { | |
4837 | while (node != id->dst_node && !node->next_sibling_clone) | |
4838 | node = node->clone_of; | |
4839 | if (node != id->dst_node) | |
4840 | node = node->next_sibling_clone; | |
4841 | } | |
4842 | } | |
4843 | } | |
4844 | ||
19734dd8 RL |
4845 | /* Create a copy of a function's tree. |
4846 | OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes | |
4847 | of the original function and the new copied function | |
b8698a0f L |
4848 | respectively. If we want to replace a DECL | |
4849 | tree with another tree while duplicating the function's | |
4850 | body, TREE_MAP represents the mapping between these | |
ea99e0be JH |
4851 | trees. If UPDATE_CLONES is set, the call_stmt fields | |
4852 | of edges of clones of the function will be updated. ARGS_TO_SKIP, if non-NULL, marks parameters to omit from the new function. */ | |
19734dd8 | 4853 | void |
27dbd3ac RH |
4854 | tree_function_versioning (tree old_decl, tree new_decl, |
4855 | VEC(ipa_replace_map_p,gc)* tree_map, | |
c6f7cfc1 | 4856 | bool update_clones, bitmap args_to_skip) |
19734dd8 RL |
4857 | { |
4858 | struct cgraph_node *old_version_node; | |
4859 | struct cgraph_node *new_version_node; | |
1b369fae | 4860 | copy_body_data id; |
110cfe1c | 4861 | tree p; |
19734dd8 RL |
4862 | unsigned i; |
4863 | struct ipa_replace_map *replace_info; | |
b5b8b0ac | 4864 | basic_block old_entry_block, bb; |
0f1961a2 JH |
4865 | VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10); |
4866 | ||
19734dd8 | 4867 | tree t_step; |
873aa8f5 | 4868 | tree old_current_function_decl = current_function_decl; |
0f1961a2 | 4869 | tree vars = NULL_TREE; |
19734dd8 RL |
4870 | |
4871 | gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL | |
4872 | && TREE_CODE (new_decl) == FUNCTION_DECL); | |
4873 | DECL_POSSIBLY_INLINED (old_decl) = 1; | |
4874 | ||
4875 | old_version_node = cgraph_node (old_decl); | |
4876 | new_version_node = cgraph_node (new_decl); | |
4877 | ||
a3aadcc5 JH |
4878 | /* Output the inlining info for this abstract function, since it has been |
4879 | inlined. If we don't do this now, we can lose the information about the | |
4880 | variables in the function when the blocks get blown away as soon as we | |
4881 | remove the cgraph node. */ | |
4882 | (*debug_hooks->outlining_inline_function) (old_decl); | |
4883 | ||
19734dd8 RL |
4884 | DECL_ARTIFICIAL (new_decl) = 1; |
4885 | DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl); | |
f9417da1 | 4886 | DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl); |
19734dd8 | 4887 | |
3d283195 JH |
4888 | /* Prepare the data structures for the tree copy. */ |
4889 | memset (&id, 0, sizeof (id)); | |
4890 | ||
19734dd8 | 4891 | /* Collect the statements that will need folding after the copy. */ | |
9187e02d | 4892 | id.statements_to_fold = pointer_set_create (); |
b5b8b0ac | 4893 | |
6be42dd4 | 4894 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 4895 | id.debug_map = NULL; |
1b369fae RH |
4896 | id.src_fn = old_decl; |
4897 | id.dst_fn = new_decl; | |
4898 | id.src_node = old_version_node; | |
4899 | id.dst_node = new_version_node; | |
4900 | id.src_cfun = DECL_STRUCT_FUNCTION (old_decl); | |
0e3776db JH |
4901 | if (id.src_node->ipa_transforms_to_apply) |
4902 | { | |
4903 | VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply; | |
4904 | unsigned int i; | |
4905 | ||
4906 | id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap, | |
4907 | id.src_node->ipa_transforms_to_apply); | |
4908 | for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++) | |
4909 | VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply, | |
4910 | VEC_index (ipa_opt_pass, | |
4911 | old_transforms_to_apply, | |
4912 | i)); | |
4913 | } | |
b8698a0f | 4914 | |
1b369fae RH |
4915 | id.copy_decl = copy_decl_no_change; |
4916 | id.transform_call_graph_edges | |
4917 | = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE; | |
4918 | id.transform_new_cfg = true; | |
4919 | id.transform_return_to_modify = false; | |
9ff420f1 | 4920 | id.transform_lang_insert_block = NULL; |
1b369fae | 4921 | |
19734dd8 | 4922 | current_function_decl = new_decl; |
110cfe1c JH |
4923 | old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION |
4924 | (DECL_STRUCT_FUNCTION (old_decl)); | |
4925 | initialize_cfun (new_decl, old_decl, | |
0d63a740 | 4926 | old_entry_block->count); |
1755aad0 RG |
4927 | DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta |
4928 | = id.src_cfun->gimple_df->ipa_pta; | |
110cfe1c | 4929 | push_cfun (DECL_STRUCT_FUNCTION (new_decl)); |
b8698a0f | 4930 | |
19734dd8 RL |
4931 | /* Copy the function's static chain. */ |
4932 | p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl; | |
4933 | if (p) | |
4934 | DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl = | |
4935 | copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl, | |
4936 | &id); | |
b8698a0f | 4937 | |
19734dd8 RL |
4938 | /* If there's a tree_map, prepare for substitution. */ |
4939 | if (tree_map) | |
9187e02d | 4940 | for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++) |
19734dd8 | 4941 | { |
0f1961a2 | 4942 | gimple init; |
9187e02d | 4943 | replace_info = VEC_index (ipa_replace_map_p, tree_map, i); |
1b369fae | 4944 | if (replace_info->replace_p) |
00fc2333 | 4945 | { |
657c0925 JH |
4946 | tree op = replace_info->new_tree; |
4947 | ||
4948 | STRIP_NOPS (op); | |
4949 | ||
4950 | if (TREE_CODE (op) == VIEW_CONVERT_EXPR) | |
4951 | op = TREE_OPERAND (op, 0); | |
b8698a0f | 4952 | |
657c0925 | 4953 | if (TREE_CODE (op) == ADDR_EXPR) |
00fc2333 | 4954 | { |
657c0925 | 4955 | op = TREE_OPERAND (op, 0); |
00fc2333 JH |
4956 | while (handled_component_p (op)) |
4957 | op = TREE_OPERAND (op, 0); | |
4958 | if (TREE_CODE (op) == VAR_DECL) | |
4959 | add_referenced_var (op); | |
4960 | } | |
0f1961a2 JH |
4961 | gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL); |
4962 | init = setup_one_parameter (&id, replace_info->old_tree, | |
4963 | replace_info->new_tree, id.src_fn, | |
4964 | NULL, | |
4965 | &vars); | |
4966 | if (init) | |
4967 | VEC_safe_push (gimple, heap, init_stmts, init); | |
00fc2333 | 4968 | } |
19734dd8 | 4969 | } |
eb50f5f4 JH |
4970 | /* Copy the function's arguments. */ |
4971 | if (DECL_ARGUMENTS (old_decl) != NULL_TREE) | |
4972 | DECL_ARGUMENTS (new_decl) = | |
4973 | copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id, | |
4974 | args_to_skip, &vars); | |
b8698a0f | 4975 | |
eb50f5f4 | 4976 | DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id); |
b8698a0f | 4977 | |
eb50f5f4 JH |
4978 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ |
4979 | number_blocks (id.dst_fn); | |
b8698a0f | 4980 | |
0f1961a2 | 4981 | declare_inline_vars (DECL_INITIAL (new_decl), vars); |
9187e02d | 4982 | |
cb91fab0 | 4983 | if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE) |
19734dd8 | 4984 | /* Add local vars. */ |
cb91fab0 | 4985 | for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls; |
19734dd8 RL |
4986 | t_step; t_step = TREE_CHAIN (t_step)) |
4987 | { | |
4988 | tree var = TREE_VALUE (t_step); | |
4989 | if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var)) | |
cb91fab0 | 4990 | cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls); |
526d73ab | 4991 | else if (!can_be_nonlocal (var, &id)) |
cb91fab0 | 4992 | cfun->local_decls = |
19734dd8 | 4993 | tree_cons (NULL_TREE, remap_decl (var, &id), |
cb91fab0 | 4994 | cfun->local_decls); |
19734dd8 | 4995 | } |
b8698a0f | 4996 | |
19734dd8 | 4997 | /* Copy the function's body. */ | |
0d63a740 | 4998 | copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE, |
27dbd3ac | 4999 | ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR); |
b8698a0f | 5000 | |
19734dd8 RL |
5001 | if (DECL_RESULT (old_decl) != NULL_TREE) |
5002 | { | |
5003 | tree *res_decl = &DECL_RESULT (old_decl); | |
5004 | DECL_RESULT (new_decl) = remap_decl (*res_decl, &id); | |
5005 | lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl)); | |
5006 | } | |
b8698a0f | 5007 | |
19734dd8 RL |
5008 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ |
5009 | number_blocks (new_decl); | |
5010 | ||
b5b8b0ac AO |
5011 | /* We want to create the BB unconditionally, so that the addition of |
5012 | debug stmts doesn't affect BB count, which may in the end cause | |
5013 | codegen differences. */ | |
5014 | bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR)); | |
5015 | while (VEC_length (gimple, init_stmts)) | |
5016 | insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts)); | |
08ad1d6d | 5017 | update_clone_info (&id); |
0f1961a2 | 5018 | |
27dbd3ac RH |
5019 | /* Remap the nonlocal_goto_save_area, if any. */ |
5020 | if (cfun->nonlocal_goto_save_area) | |
5021 | { | |
5022 | struct walk_stmt_info wi; | |
5023 | ||
5024 | memset (&wi, 0, sizeof (wi)); | |
5025 | wi.info = &id; | |
5026 | walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL); | |
5027 | } | |
5028 | ||
19734dd8 | 5029 | /* Clean up. */ |
6be42dd4 | 5030 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
5031 | if (id.debug_map) |
5032 | pointer_map_destroy (id.debug_map); | |
5006671f RG |
5033 | free_dominance_info (CDI_DOMINATORS); |
5034 | free_dominance_info (CDI_POST_DOMINATORS); | |
9187e02d JH |
5035 | |
5036 | fold_marked_statements (0, id.statements_to_fold); | |
5037 | pointer_set_destroy (id.statements_to_fold); | |
5038 | fold_cond_expr_cond (); | |
5039 | delete_unreachable_blocks_update_callgraph (&id); | |
5040 | update_ssa (TODO_update_ssa); | |
5041 | free_dominance_info (CDI_DOMINATORS); | |
5042 | free_dominance_info (CDI_POST_DOMINATORS); | |
5043 | ||
b5b8b0ac | 5044 | gcc_assert (!id.debug_stmts); |
0f1961a2 | 5045 | VEC_free (gimple, heap, init_stmts); |
110cfe1c | 5046 | pop_cfun (); |
873aa8f5 JH |
5047 | current_function_decl = old_current_function_decl; |
5048 | gcc_assert (!current_function_decl | |
5049 | || DECL_STRUCT_FUNCTION (current_function_decl) == cfun); | |
19734dd8 RL |
5050 | return; |
5051 | } | |
5052 | ||
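/* Illustrative sketch, not part of this file: the minimal steps a caller
   performs to produce a new version of FN, in the style of
   cgraph_function_versioning.  Naming the new decl, fixing up its
   assembler name and registering it with the callgraph are elided.  */
#if 0
static tree
example_make_version (tree fn)
{
  tree new_decl;

  if (!tree_versionable_function_p (fn))
    return NULL_TREE;

  /* The new version starts as a copy of the FUNCTION_DECL; its body is
     then produced by tree_function_versioning.  */
  new_decl = copy_node (fn);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;

  /* No replacements, no clone updating, no skipped arguments.  */
  tree_function_versioning (fn, new_decl, NULL, false, NULL);
  return new_decl;
}
#endif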
f82a627c EB |
5053 | /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate | |
5054 | the callee and return the inlined body on success. */ | |
5055 | ||
5056 | tree | |
5057 | maybe_inline_call_in_expr (tree exp) | |
5058 | { | |
5059 | tree fn = get_callee_fndecl (exp); | |
5060 | ||
5061 | /* We can only try to inline "const" functions. */ | |
5062 | if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn)) | |
5063 | { | |
5064 | struct pointer_map_t *decl_map = pointer_map_create (); | |
5065 | call_expr_arg_iterator iter; | |
5066 | copy_body_data id; | |
5067 | tree param, arg, t; | |
5068 | ||
5069 | /* Remap the parameters. */ | |
5070 | for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter); | |
5071 | param; | |
5072 | param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter)) | |
5073 | *pointer_map_insert (decl_map, param) = arg; | |
5074 | ||
5075 | memset (&id, 0, sizeof (id)); | |
5076 | id.src_fn = fn; | |
5077 | id.dst_fn = current_function_decl; | |
5078 | id.src_cfun = DECL_STRUCT_FUNCTION (fn); | |
5079 | id.decl_map = decl_map; | |
5080 | ||
5081 | id.copy_decl = copy_decl_no_change; | |
5082 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
5083 | id.transform_new_cfg = false; | |
5084 | id.transform_return_to_modify = true; | |
5085 | id.transform_lang_insert_block = NULL; | |
5086 | ||
5087 | /* Make sure not to unshare trees behind the front-end's back | |
5088 | since front-end specific mechanisms may rely on sharing. */ | |
5089 | id.regimplify = false; | |
5090 | id.do_not_unshare = true; | |
5091 | ||
5092 | /* We're not inside any EH region. */ | |
1d65f45c | 5093 | id.eh_lp_nr = 0; |
f82a627c EB |
5094 | |
5095 | t = copy_tree_body (&id); | |
5096 | pointer_map_destroy (decl_map); | |
5097 | ||
5098 | /* We can only return something suitable for use in a GENERIC | |
5099 | expression tree. */ | |
5100 | if (TREE_CODE (t) == MODIFY_EXPR) | |
5101 | return TREE_OPERAND (t, 1); | |
5102 | } | |
5103 | ||
5104 | return NULL_TREE; | |
5105 | } | |
5106 | ||
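/* Illustrative sketch, not part of this file: a front end folding a call
   to a "const" function at GENERIC level falls back to the original
   CALL_EXPR when inlining is declined.  The helper name is
   hypothetical.  */
#if 0
static tree
example_fold_const_call (tree call)
{
  tree inlined = maybe_inline_call_in_expr (call);
  return inlined ? inlined : call;
}
#endif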
52dd234b RH |
5107 | /* Duplicate a type, fields and all. */ |
5108 | ||
5109 | tree | |
5110 | build_duplicate_type (tree type) | |
5111 | { | |
1b369fae | 5112 | struct copy_body_data id; |
52dd234b RH |
5113 | |
5114 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
5115 | id.src_fn = current_function_decl; |
5116 | id.dst_fn = current_function_decl; | |
5117 | id.src_cfun = cfun; | |
6be42dd4 | 5118 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 5119 | id.debug_map = NULL; |
4009f2e7 | 5120 | id.copy_decl = copy_decl_no_change; |
52dd234b RH |
5121 | |
5122 | type = remap_type_1 (type, &id); | |
5123 | ||
6be42dd4 | 5124 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
5125 | if (id.debug_map) |
5126 | pointer_map_destroy (id.debug_map); | |
52dd234b | 5127 | |
f31c9f09 DG |
5128 | TYPE_CANONICAL (type) = type; |
5129 | ||
52dd234b RH |
5130 | return type; |
5131 | } | |
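/* Illustrative sketch, not part of this file: a caller that must adjust
   a type without affecting its other users first makes a private
   duplicate, then mutates the duplicate freely.  The helper and the
   particular mutation are hypothetical.  */
#if 0
static tree
example_private_variant (tree type)
{
  tree dup = build_duplicate_type (type);

  /* Safe: no other tree shares DUP or its fields.  */
  TYPE_USER_ALIGN (dup) = 1;
  return dup;
}
#endif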
ab442df7 MM |
5132 | |
5133 | /* Return whether it is safe to inline the call represented by edge E, | |
6eb29714 XDL |
5134 | taking into account possibly mismatched target-specific options and | |
5135 | call-site actual types that mismatch the parameter types. */ | |
ab442df7 | 5136 | bool |
6eb29714 | 5137 | tree_can_inline_p (struct cgraph_edge *e) |
ab442df7 | 5138 | { |
5779e713 MM |
5139 | #if 0 |
5140 | /* This causes a regression in SPEC in that it prevents a cold function from | |
5141 | inlining a hot function. Perhaps this should only apply to functions | |
5142 | that the user declares hot/cold/optimize explicitly. */ | |
5143 | ||
ab442df7 MM |
5144 | /* Don't inline a function with a higher optimization level than the |
5145 | caller, or with different space constraints (hot/cold functions). */ | |
5146 | tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller); | |
5147 | tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee); | |
5148 | ||
5149 | if (caller_tree != callee_tree) | |
5150 | { | |
5151 | struct cl_optimization *caller_opt | |
5152 | = TREE_OPTIMIZATION ((caller_tree) | |
5153 | ? caller_tree | |
5154 | : optimization_default_node); | |
5155 | ||
5156 | struct cl_optimization *callee_opt | |
5157 | = TREE_OPTIMIZATION ((callee_tree) | |
5158 | ? callee_tree | |
5159 | : optimization_default_node); | |
5160 | ||
5161 | if ((caller_opt->optimize > callee_opt->optimize) | |
5162 | || (caller_opt->optimize_size != callee_opt->optimize_size)) | |
5163 | return false; | |
5164 | } | |
5779e713 | 5165 | #endif |
8fd8a06f | 5166 | tree caller, callee, lhs; |
6eb29714 XDL |
5167 | |
5168 | caller = e->caller->decl; | |
5169 | callee = e->callee->decl; | |
ab442df7 | 5170 | |
f9417da1 RG |
5171 | /* We cannot inline a function that uses a different EH personality |
5172 | than the caller. */ | |
5173 | if (DECL_FUNCTION_PERSONALITY (caller) | |
5174 | && DECL_FUNCTION_PERSONALITY (callee) | |
5175 | && (DECL_FUNCTION_PERSONALITY (caller) | |
5176 | != DECL_FUNCTION_PERSONALITY (callee))) | |
5177 | { | |
5178 | e->inline_failed = CIF_UNSPECIFIED; | |
5179 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
5180 | return false; | |
5181 | } | |
5182 | ||
ab442df7 | 5183 | /* Allow the backend to decide if inlining is ok. */ |
6eb29714 XDL |
5184 | if (!targetm.target_option.can_inline_p (caller, callee)) |
5185 | { | |
5186 | e->inline_failed = CIF_TARGET_OPTION_MISMATCH; | |
5187 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
d7f09764 | 5188 | e->call_stmt_cannot_inline_p = true; |
6eb29714 XDL |
5189 | return false; |
5190 | } | |
5191 | ||
8fd8a06f RG |
5192 | /* Do not inline calls where we cannot trivially work around mismatches | |
5193 | in argument or return types. */ | |
d7f09764 | 5194 | if (e->call_stmt |
8fd8a06f RG |
5195 | && ((DECL_RESULT (callee) |
5196 | && !DECL_BY_REFERENCE (DECL_RESULT (callee)) | |
5197 | && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE | |
5198 | && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)), | |
5199 | TREE_TYPE (lhs)) | |
5200 | && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs)) | |
5201 | || !gimple_check_call_args (e->call_stmt))) | |
6eb29714 XDL |
5202 | { |
5203 | e->inline_failed = CIF_MISMATCHED_ARGUMENTS; | |
5204 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
d7f09764 | 5205 | e->call_stmt_cannot_inline_p = true; |
6eb29714 XDL |
5206 | return false; |
5207 | } | |
5208 | ||
5209 | return true; | |
ab442df7 | 5210 | } |
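/* Illustrative sketch, not part of this file: the inliner is expected to
   filter candidate edges through tree_can_inline_p before committing to
   them; on failure the reason is recorded on the edge.  The helper name
   is hypothetical.  */
#if 0
static bool
example_consider_edge (struct cgraph_edge *e)
{
  if (!tree_can_inline_p (e))
    {
      /* E->inline_failed now holds e.g. CIF_MISMATCHED_ARGUMENTS or
         CIF_TARGET_OPTION_MISMATCH.  */
      return false;
    }
  return true;
}
#endif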