/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h" /* floor_log2 */
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h" /* FIXME: For asm_str_count.  */

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, and the result is a new
   function, rather than being copied into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

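/* As a rough illustration (a sketch, not actual compiler output):
   when a call "y = f (a);" to

       int f (int x) { return x + 1; }

   is inlined, the duplicated body is rewritten so that the PARM_DECL
   x becomes a local VAR_DECL initialized from a, and the RETURN_EXPR
   becomes an assignment to a returned-value variable that is then
   copied into y.  */
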
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from needing to introduce a PHI node
     in the case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0 except for
             the case we are inlining into the very first BB.  We can
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

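/* Subroutine of remap_type.  TYPE is known to need a new copy; build
   and register the remapped type here, recursing into component and
   variant types as needed.  */
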
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            DECL_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

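/* Remap TYPE in the context of ID.  Reuse an existing mapping if there
   is one; types that are not variably modified map to themselves.  */
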
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

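/* Remap the chain of declarations DECLS in the context of ID, pushing
   declarations that are kept non-localized onto NONLOCALIZED_LIST.
   Return the new declaration chain.  */
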
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            add_local_decl (cfun, old_var);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          DECL_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

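/* Copy the STATEMENT_LIST pointed to by *TP in place, recursing into
   nested statement lists.  */
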
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

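/* Copy the BIND_EXPR pointed to by *TP in place, remapping its block
   and its variables in the context of ID.  */
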
static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          /* See remap_ssa_name.  */
          if (TREE_CODE (decl) == SSA_NAME
              && TREE_CODE (SSA_NAME_VAR (decl)) == RESULT_DECL
              && id->transform_return_to_modify)
            decl = SSA_NAME_VAR (decl);

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              tree ptr = unshare_expr (*n);
              tree tem;
              if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
                                                         ptr,
                                                         TREE_OPERAND (*tp, 1),
                                                         TREE_TYPE (*tp)))
                  && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
                {
                  tree *tem_basep = &tem;
                  while (handled_component_p (*tem_basep))
                    tem_basep = &TREE_OPERAND (*tem_basep, 0);
                  if (TREE_CODE (*tem_basep) == MEM_REF)
                    *tem_basep
                      = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                                TREE_OPERAND (*tem_basep, 0),
                                fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
                                              TREE_OPERAND (*tem_basep, 1)));
                  else
                    *tem_basep
                      = build2 (MEM_REF, TREE_TYPE (*tem_basep),
                                build_fold_addr_expr (*tem_basep),
                                build_int_cst
                                  (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
                  *tp = tem;
                }
              else
                {
                  *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                     ptr, TREE_OPERAND (*tp, 1));
                  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                }
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          id->block = block;
          recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Called from copy_tree_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *) 1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }
      else if (TREE_CODE (*tp) == MEM_REF)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree old = *tp;
              *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
                                 unshare_expr (*n), TREE_OPERAND (*tp, 1));
              TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
              TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed
         block.  When inlining we want EXPRs without a block to appear
         in the block of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}

726a989a RB |
1214 | |
1215 | /* Helper for copy_bb. Remap statement STMT using the inlining | |
1216 | information in ID. Return the new statement copy. */ | |
1217 | ||
1218 | static gimple | |
1219 | remap_gimple_stmt (gimple stmt, copy_body_data *id) | |
1220 | { | |
1221 | gimple copy = NULL; | |
1222 | struct walk_stmt_info wi; | |
1223 | tree new_block; | |
5a6e26b7 | 1224 | bool skip_first = false; |
726a989a RB |
1225 | |
1226 | /* Begin by recognizing trees that we'll completely rewrite for the | |
1227 | inlining context. Our output for these trees is completely | |
1228 | different from out input (e.g. RETURN_EXPR is deleted, and morphs | |
1229 | into an edge). Further down, we'll handle trees that get | |
1230 | duplicated and/or tweaked. */ | |
1231 | ||
1232 | /* When requested, GIMPLE_RETURNs should be transformed to just the | |
1233 | contained GIMPLE_ASSIGN. The branch semantics of the return will | |
1234 | be handled elsewhere by manipulating the CFG rather than the | |
1235 | statement. */ | |
1236 | if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify) | |
1237 | { | |
1238 | tree retval = gimple_return_retval (stmt); | |
1239 | ||
1240 | /* If we're returning something, just turn that into an | |
1241 | assignment into the equivalent of the original RESULT_DECL. | |
1242 | If RETVAL is just the result decl, the result decl has | |
1243 | already been set (e.g. a recent "foo (&result_decl, ...)"); | |
1244 | just toss the entire GIMPLE_RETURN. */ | |
6938f93f JH |
1245 | if (retval |
1246 | && (TREE_CODE (retval) != RESULT_DECL | |
1247 | && (TREE_CODE (retval) != SSA_NAME | |
1248 | || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL))) | |
5a6e26b7 JH |
1249 | { |
1250 | copy = gimple_build_assign (id->retvar, retval); | |
1251 | /* id->retvar is already substituted. Skip it on later remapping. */ | |
1252 | skip_first = true; | |
1253 | } | |
726a989a RB |
1254 | else |
1255 | return gimple_build_nop (); | |
1256 | } | |
1257 | else if (gimple_has_substatements (stmt)) | |
1258 | { | |
1259 | gimple_seq s1, s2; | |
1260 | ||
1261 | /* When cloning bodies from the C++ front end, we will be handed bodies | |
1262 | in High GIMPLE form. Handle here all the High GIMPLE statements that | |
1263 | have embedded statements. */ | |
1264 | switch (gimple_code (stmt)) | |
1265 | { | |
1266 | case GIMPLE_BIND: | |
1267 | copy = copy_gimple_bind (stmt, id); | |
1268 | break; | |
1269 | ||
1270 | case GIMPLE_CATCH: | |
1271 | s1 = remap_gimple_seq (gimple_catch_handler (stmt), id); | |
1272 | copy = gimple_build_catch (gimple_catch_types (stmt), s1); | |
1273 | break; | |
1274 | ||
1275 | case GIMPLE_EH_FILTER: | |
1276 | s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id); | |
1277 | copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1); | |
1278 | break; | |
1279 | ||
1280 | case GIMPLE_TRY: | |
1281 | s1 = remap_gimple_seq (gimple_try_eval (stmt), id); | |
1282 | s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id); | |
b8698a0f | 1283 | copy = gimple_build_try (s1, s2, gimple_try_kind (stmt)); |
726a989a RB |
1284 | break; |
1285 | ||
1286 | case GIMPLE_WITH_CLEANUP_EXPR: | |
1287 | s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id); | |
1288 | copy = gimple_build_wce (s1); | |
1289 | break; | |
1290 | ||
1291 | case GIMPLE_OMP_PARALLEL: | |
1292 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1293 | copy = gimple_build_omp_parallel | |
1294 | (s1, | |
1295 | gimple_omp_parallel_clauses (stmt), | |
1296 | gimple_omp_parallel_child_fn (stmt), | |
1297 | gimple_omp_parallel_data_arg (stmt)); | |
1298 | break; | |
1299 | ||
1300 | case GIMPLE_OMP_TASK: | |
1301 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1302 | copy = gimple_build_omp_task | |
1303 | (s1, | |
1304 | gimple_omp_task_clauses (stmt), | |
1305 | gimple_omp_task_child_fn (stmt), | |
1306 | gimple_omp_task_data_arg (stmt), | |
1307 | gimple_omp_task_copy_fn (stmt), | |
1308 | gimple_omp_task_arg_size (stmt), | |
1309 | gimple_omp_task_arg_align (stmt)); | |
1310 | break; | |
1311 | ||
1312 | case GIMPLE_OMP_FOR: | |
1313 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1314 | s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id); | |
1315 | copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt), | |
1316 | gimple_omp_for_collapse (stmt), s2); | |
1317 | { | |
1318 | size_t i; | |
1319 | for (i = 0; i < gimple_omp_for_collapse (stmt); i++) | |
1320 | { | |
1321 | gimple_omp_for_set_index (copy, i, | |
1322 | gimple_omp_for_index (stmt, i)); | |
1323 | gimple_omp_for_set_initial (copy, i, | |
1324 | gimple_omp_for_initial (stmt, i)); | |
1325 | gimple_omp_for_set_final (copy, i, | |
1326 | gimple_omp_for_final (stmt, i)); | |
1327 | gimple_omp_for_set_incr (copy, i, | |
1328 | gimple_omp_for_incr (stmt, i)); | |
1329 | gimple_omp_for_set_cond (copy, i, | |
1330 | gimple_omp_for_cond (stmt, i)); | |
1331 | } | |
1332 | } | |
1333 | break; | |
1334 | ||
1335 | case GIMPLE_OMP_MASTER: | |
1336 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1337 | copy = gimple_build_omp_master (s1); | |
1338 | break; | |
1339 | ||
1340 | case GIMPLE_OMP_ORDERED: | |
1341 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1342 | copy = gimple_build_omp_ordered (s1); | |
1343 | break; | |
1344 | ||
1345 | case GIMPLE_OMP_SECTION: | |
1346 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1347 | copy = gimple_build_omp_section (s1); | |
1348 | break; | |
1349 | ||
1350 | case GIMPLE_OMP_SECTIONS: | |
1351 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1352 | copy = gimple_build_omp_sections | |
1353 | (s1, gimple_omp_sections_clauses (stmt)); | |
1354 | break; | |
1355 | ||
1356 | case GIMPLE_OMP_SINGLE: | |
1357 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1358 | copy = gimple_build_omp_single | |
1359 | (s1, gimple_omp_single_clauses (stmt)); | |
1360 | break; | |
1361 | ||
05a26161 JJ |
1362 | case GIMPLE_OMP_CRITICAL: |
1363 | s1 = remap_gimple_seq (gimple_omp_body (stmt), id); | |
1364 | copy | |
1365 | = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt)); | |
1366 | break; | |
1367 | ||
726a989a RB |
1368 | default: |
1369 | gcc_unreachable (); | |
1370 | } | |
1371 | } | |
1372 | else | |
1373 | { | |
1374 | if (gimple_assign_copy_p (stmt) | |
1375 | && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt) | |
1376 | && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn)) | |
1377 | { | |
1378 | /* Here we handle statements that are not completely rewritten. | |
1379 | First we detect some inlining-induced bogosities for | |
1380 | discarding. */ | |
1381 | ||
1382 | /* Some assignments VAR = VAR; don't generate any rtl code | |
1383 | and thus don't count as variable modification. Avoid | |
1384 | keeping bogosities like 0 = 0. */ | |
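/* As an illustration (names are hypothetical): if parameter Q was
   remapped to the constant 7, a copied self-assignment Q = Q would
   otherwise survive as the nonsensical 7 = 7; the GIMPLE_NOP built
   below discards it.  */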
1385 | tree decl = gimple_assign_lhs (stmt), value; | |
1386 | tree *n; | |
1387 | ||
1388 | n = (tree *) pointer_map_contains (id->decl_map, decl); | |
1389 | if (n) | |
1390 | { | |
1391 | value = *n; | |
1392 | STRIP_TYPE_NOPS (value); | |
1393 | if (TREE_CONSTANT (value) || TREE_READONLY (value)) | |
1394 | return gimple_build_nop (); | |
1395 | } | |
1396 | } | |
1397 | ||
b5b8b0ac AO |
1398 | if (gimple_debug_bind_p (stmt)) |
1399 | { | |
1400 | copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt), | |
1401 | gimple_debug_bind_get_value (stmt), | |
1402 | stmt); | |
1403 | VEC_safe_push (gimple, heap, id->debug_stmts, copy); | |
1404 | return copy; | |
1405 | } | |
1d65f45c RH |
1406 | |
1407 | /* Create a new deep copy of the statement. */ | |
1408 | copy = gimple_copy (stmt); | |
1409 | ||
1410 | /* Remap the region numbers for __builtin_eh_{pointer,filter}, | |
1411 | RESX and EH_DISPATCH. */ | |
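/* For instance (region numbers illustrative only): a copied call
   __builtin_eh_pointer (1) that named region 1 of the callee must be
   rewritten to name the corresponding duplicated region in the
   caller, say __builtin_eh_pointer (7), using the id->eh_map built
   when the EH regions were duplicated.  */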
1412 | if (id->eh_map) | |
1413 | switch (gimple_code (copy)) | |
1414 | { | |
1415 | case GIMPLE_CALL: | |
1416 | { | |
1417 | tree r, fndecl = gimple_call_fndecl (copy); | |
1418 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
1419 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1420 | { | |
1421 | case BUILT_IN_EH_COPY_VALUES: | |
1422 | r = gimple_call_arg (copy, 1); | |
1423 | r = remap_eh_region_tree_nr (r, id); | |
1424 | gimple_call_set_arg (copy, 1, r); | |
1425 | /* FALLTHRU */ | |
1426 | ||
1427 | case BUILT_IN_EH_POINTER: | |
1428 | case BUILT_IN_EH_FILTER: | |
1429 | r = gimple_call_arg (copy, 0); | |
1430 | r = remap_eh_region_tree_nr (r, id); | |
1431 | gimple_call_set_arg (copy, 0, r); | |
1432 | break; | |
1433 | ||
1434 | default: | |
1435 | break; | |
1436 | } | |
d086d311 | 1437 | |
25a6a873 RG |
1438 | /* Reset the alias info unless measures were applied (by setting | |
1439 | DECL_PT_UID) to keep it valid over inlining. */ | |
1440 | if (!id->src_cfun->gimple_df | |
1441 | || !id->src_cfun->gimple_df->ipa_pta) | |
1442 | gimple_call_reset_alias_info (copy); | |
1d65f45c RH |
1443 | } |
1444 | break; | |
1445 | ||
1446 | case GIMPLE_RESX: | |
1447 | { | |
1448 | int r = gimple_resx_region (copy); | |
1449 | r = remap_eh_region_nr (r, id); | |
1450 | gimple_resx_set_region (copy, r); | |
1451 | } | |
1452 | break; | |
1453 | ||
1454 | case GIMPLE_EH_DISPATCH: | |
1455 | { | |
1456 | int r = gimple_eh_dispatch_region (copy); | |
1457 | r = remap_eh_region_nr (r, id); | |
1458 | gimple_eh_dispatch_set_region (copy, r); | |
1459 | } | |
1460 | break; | |
1461 | ||
1462 | default: | |
1463 | break; | |
1464 | } | |
726a989a RB |
1465 | } |
1466 | ||
1467 | /* If STMT has a block defined, map it to the newly constructed | |
1468 | block. When inlining we want statements without a block to | |
1469 | appear in the block of the function call. */ | |
1470 | new_block = id->block; | |
1471 | if (gimple_block (copy)) | |
1472 | { | |
1473 | tree *n; | |
1474 | n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy)); | |
1475 | gcc_assert (n); | |
1476 | new_block = *n; | |
1477 | } | |
1478 | ||
1479 | gimple_set_block (copy, new_block); | |
1480 | ||
b5b8b0ac AO |
1481 | if (gimple_debug_bind_p (copy)) |
1482 | return copy; | |
1483 | ||
726a989a RB |
1484 | /* Remap all the operands in COPY. */ |
1485 | memset (&wi, 0, sizeof (wi)); | |
1486 | wi.info = id; | |
5a6e26b7 JH |
1487 | if (skip_first) |
1488 | walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL); | |
1489 | else | |
b8698a0f | 1490 | walk_gimple_op (copy, remap_gimple_op_r, &wi); |
726a989a | 1491 | |
5006671f RG |
1492 | /* Clear the copied virtual operands. We are not remapping them here |
1493 | but are going to recreate them from scratch. */ | |
1494 | if (gimple_has_mem_ops (copy)) | |
1495 | { | |
1496 | gimple_set_vdef (copy, NULL_TREE); | |
1497 | gimple_set_vuse (copy, NULL_TREE); | |
1498 | } | |
1499 | ||
726a989a RB |
1500 | return copy; |
1501 | } | |
1502 | ||
1503 | ||
e21aff8a SB |
1504 | /* Copy basic block, scale profile accordingly. Edges will be taken care of |
1505 | later */ | |
1506 | ||
1507 | static basic_block | |
0178d644 VR |
1508 | copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, |
1509 | gcov_type count_scale) | |
e21aff8a | 1510 | { |
c2a4718a | 1511 | gimple_stmt_iterator gsi, copy_gsi, seq_gsi; |
e21aff8a | 1512 | basic_block copy_basic_block; |
726a989a | 1513 | tree decl; |
0d63a740 | 1514 | gcov_type freq; |
91382288 JH |
1515 | basic_block prev; |
1516 | ||
1517 | /* Search for previous copied basic block. */ | |
1518 | prev = bb->prev_bb; | |
1519 | while (!prev->aux) | |
1520 | prev = prev->prev_bb; | |
e21aff8a SB |
1521 | |
1522 | /* create_basic_block() will append every new block to | |
1523 | basic_block_info automatically. */ | |
cceb1885 | 1524 | copy_basic_block = create_basic_block (NULL, (void *) 0, |
91382288 | 1525 | (basic_block) prev->aux); |
e21aff8a | 1526 | copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE; |
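/* A worked example of the scaling (numbers are made up): with
   REG_BR_PROB_BASE == 10000 and a call site executing one quarter as
   often as the callee's entry block, count_scale is 2500, so a block
   with count 400 is copied with count 400 * 2500 / 10000 == 100.  */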
45a80bb9 | 1527 | |
726a989a RB |
1528 | /* We are going to rebuild frequencies from scratch.  These values | |
1529 | are of only minor importance for driving canonicalize_loop_headers. */ | |
0d63a740 | 1530 | freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE); |
726a989a | 1531 | |
0d63a740 JH |
1532 | /* We recompute frequencies after inlining, so this is quite safe. */ |
1533 | if (freq > BB_FREQ_MAX) | |
1534 | freq = BB_FREQ_MAX; | |
1535 | copy_basic_block->frequency = freq; | |
e21aff8a | 1536 | |
726a989a RB |
1537 | copy_gsi = gsi_start_bb (copy_basic_block); |
1538 | ||
1539 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
e21aff8a | 1540 | { |
726a989a RB |
1541 | gimple stmt = gsi_stmt (gsi); |
1542 | gimple orig_stmt = stmt; | |
e21aff8a | 1543 | |
416c991f | 1544 | id->regimplify = false; |
726a989a RB |
1545 | stmt = remap_gimple_stmt (stmt, id); |
1546 | if (gimple_nop_p (stmt)) | |
1547 | continue; | |
1548 | ||
1549 | gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt); | |
c2a4718a | 1550 | seq_gsi = copy_gsi; |
726a989a RB |
1551 | |
1552 | /* With return slot optimization we can end up with | |
1553 | non-gimple (foo *)&this->m, fix that here. */ | |
4c29307d JJ |
1554 | if (is_gimple_assign (stmt) |
1555 | && gimple_assign_rhs_code (stmt) == NOP_EXPR | |
1556 | && !is_gimple_val (gimple_assign_rhs1 (stmt))) | |
e21aff8a | 1557 | { |
726a989a | 1558 | tree new_rhs; |
c2a4718a | 1559 | new_rhs = force_gimple_operand_gsi (&seq_gsi, |
4a2b7f24 JJ |
1560 | gimple_assign_rhs1 (stmt), |
1561 | true, NULL, false, GSI_NEW_STMT); | |
726a989a | 1562 | gimple_assign_set_rhs1 (stmt, new_rhs); |
c2a4718a | 1563 | id->regimplify = false; |
726a989a | 1564 | } |
2b65dae5 | 1565 | |
c2a4718a JJ |
1566 | gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT); |
1567 | ||
1568 | if (id->regimplify) | |
1569 | gimple_regimplify_operands (stmt, &seq_gsi); | |
1570 | ||
1571 | /* If copy_basic_block was empty at the start of this iteration, | |
1572 | call gsi_start_bb again to get at the newly added statements. */ | |
1573 | if (gsi_end_p (copy_gsi)) | |
1574 | copy_gsi = gsi_start_bb (copy_basic_block); | |
1575 | else | |
1576 | gsi_next (&copy_gsi); | |
110cfe1c | 1577 | |
726a989a RB |
1578 | /* Process the new statement. The call to gimple_regimplify_operands |
1579 | possibly turned the statement into multiple statements; we | |
1580 | need to process all of them. */ | |
c2a4718a | 1581 | do |
726a989a | 1582 | { |
9187e02d JH |
1583 | tree fn; |
1584 | ||
c2a4718a | 1585 | stmt = gsi_stmt (copy_gsi); |
726a989a RB |
1586 | if (is_gimple_call (stmt) |
1587 | && gimple_call_va_arg_pack_p (stmt) | |
1588 | && id->gimple_call) | |
1589 | { | |
1590 | /* __builtin_va_arg_pack () should be replaced by | |
1591 | all arguments corresponding to ... in the caller. */ | |
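/* An illustrative sketch (names invented): when inlining
   f (const char *fmt, ...) at a call f (fmt, 1, 2), a call
   bar (__builtin_va_arg_pack ()) in f's body is rebuilt as
   bar (1, 2); the caller's '...' arguments are appended in
   place of the builtin.  */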
1592 | tree p; | |
1593 | gimple new_call; | |
1594 | VEC(tree, heap) *argarray; | |
1595 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1596 | size_t n; | |
1597 | ||
910ad8de | 1598 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p)) |
726a989a RB |
1599 | nargs--; |
1600 | ||
1601 | /* Create the new array of arguments. */ | |
1602 | n = nargs + gimple_call_num_args (stmt); | |
1603 | argarray = VEC_alloc (tree, heap, n); | |
1604 | VEC_safe_grow (tree, heap, argarray, n); | |
1605 | ||
1606 | /* Copy all the arguments before '...' */ | |
1607 | memcpy (VEC_address (tree, argarray), | |
1608 | gimple_call_arg_ptr (stmt, 0), | |
1609 | gimple_call_num_args (stmt) * sizeof (tree)); | |
1610 | ||
1611 | /* Append the arguments passed in '...' */ | |
1612 | memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt), | |
1613 | gimple_call_arg_ptr (id->gimple_call, 0) | |
1614 | + (gimple_call_num_args (id->gimple_call) - nargs), | |
1615 | nargs * sizeof (tree)); | |
1616 | ||
1617 | new_call = gimple_build_call_vec (gimple_call_fn (stmt), | |
1618 | argarray); | |
1619 | ||
1620 | VEC_free (tree, heap, argarray); | |
1621 | ||
1622 | /* Copy all GIMPLE_CALL flags, location and block, except | |
1623 | GF_CALL_VA_ARG_PACK. */ | |
1624 | gimple_call_copy_flags (new_call, stmt); | |
1625 | gimple_call_set_va_arg_pack (new_call, false); | |
1626 | gimple_set_location (new_call, gimple_location (stmt)); | |
1627 | gimple_set_block (new_call, gimple_block (stmt)); | |
1628 | gimple_call_set_lhs (new_call, gimple_call_lhs (stmt)); | |
1629 | ||
1630 | gsi_replace (&copy_gsi, new_call, false); | |
1631 | stmt = new_call; | |
1632 | } | |
1633 | else if (is_gimple_call (stmt) | |
1634 | && id->gimple_call | |
1635 | && (decl = gimple_call_fndecl (stmt)) | |
1636 | && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL | |
1637 | && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN) | |
e0704a46 | 1638 | { |
726a989a RB |
1639 | /* __builtin_va_arg_pack_len () should be replaced by |
1640 | the number of anonymous arguments. */ | |
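/* Continuing the sketch above: for the call f (fmt, 1, 2) of
   f (const char *fmt, ...), the builtin folds to the constant 2.  */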
1641 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1642 | tree count, p; | |
1643 | gimple new_stmt; | |
1644 | ||
910ad8de | 1645 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p)) |
726a989a RB |
1646 | nargs--; |
1647 | ||
1648 | count = build_int_cst (integer_type_node, nargs); | |
1649 | new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count); | |
1650 | gsi_replace (&copy_gsi, new_stmt, false); | |
1651 | stmt = new_stmt; | |
1652 | } | |
b8a00a4d | 1653 | |
726a989a RB |
1654 | /* Statements produced by inlining can be unfolded, especially | |
1655 | when we have constant propagated some operands.  We can't fold | |
1656 | them right now for two reasons: | |
1657 | 1) folding requires SSA_NAME_DEF_STMTs to be correct | |
1658 | 2) we can't change function calls to builtins. | |
1659 | So we just mark the statement for later folding.  We mark | |
1660 | all new statements, instead of just the statements that changed | |
1661 | by some nontrivial substitution, so that even statements made | |
1662 | foldable indirectly are updated.  If this turns out to be | |
1663 | expensive, copy_body can be told to watch for nontrivial | |
1664 | changes. */ | |
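/* For example (SSA names made up): if the operand b_3 of a copied
   statement a_5 = b_3 * 2 was constant propagated to 8, the statement
   folds to a_5 = 16, but only after the SSA_NAME_DEF_STMTs are
   consistent again.  */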
1665 | if (id->statements_to_fold) | |
1666 | pointer_set_insert (id->statements_to_fold, stmt); | |
1667 | ||
1668 | /* We're duplicating a CALL_EXPR. Find any corresponding | |
1669 | callgraph edges and update or duplicate them. */ | |
1670 | if (is_gimple_call (stmt)) | |
1671 | { | |
9b2a5ef7 | 1672 | struct cgraph_edge *edge; |
f618d33e | 1673 | int flags; |
6ef5231b | 1674 | |
726a989a | 1675 | switch (id->transform_call_graph_edges) |
e0704a46 | 1676 | { |
9b2a5ef7 RH |
1677 | case CB_CGE_DUPLICATE: |
1678 | edge = cgraph_edge (id->src_node, orig_stmt); | |
1679 | if (edge) | |
0d63a740 JH |
1680 | { |
1681 | int edge_freq = edge->frequency; | |
1682 | edge = cgraph_clone_edge (edge, id->dst_node, stmt, | |
1683 | gimple_uid (stmt), | |
1684 | REG_BR_PROB_BASE, CGRAPH_FREQ_BASE, | |
1685 | edge->frequency, true); | |
1686 | /* We could also just rescale the frequency, but | |
1687 | doing so would introduce roundoff errors and make the | |
1688 | verifier unhappy. */ | |
b8698a0f | 1689 | edge->frequency |
0d63a740 JH |
1690 | = compute_call_stmt_bb_frequency (id->dst_node->decl, |
1691 | copy_basic_block); | |
1692 | if (dump_file | |
1693 | && profile_status_for_function (cfun) != PROFILE_ABSENT | |
1694 | && (edge_freq > edge->frequency + 10 | |
1695 | || edge_freq < edge->frequency - 10)) | |
1696 | { | |
1697 | fprintf (dump_file, "Edge frequency estimated by " | |
1698 | "cgraph %i diverge from inliner's estimate %i\n", | |
1699 | edge_freq, | |
1700 | edge->frequency); | |
1701 | fprintf (dump_file, | |
1702 | "Orig bb: %i, orig bb freq %i, new bb freq %i\n", | |
1703 | bb->index, | |
1704 | bb->frequency, | |
1705 | copy_basic_block->frequency); | |
1706 | } | |
8132a837 | 1707 | stmt = cgraph_redirect_edge_call_stmt_to_callee (edge); |
0d63a740 | 1708 | } |
9b2a5ef7 RH |
1709 | break; |
1710 | ||
1711 | case CB_CGE_MOVE_CLONES: | |
1712 | cgraph_set_call_stmt_including_clones (id->dst_node, | |
1713 | orig_stmt, stmt); | |
1714 | edge = cgraph_edge (id->dst_node, stmt); | |
1715 | break; | |
1716 | ||
1717 | case CB_CGE_MOVE: | |
1718 | edge = cgraph_edge (id->dst_node, orig_stmt); | |
1719 | if (edge) | |
1720 | cgraph_set_call_stmt (edge, stmt); | |
1721 | break; | |
1722 | ||
1723 | default: | |
1724 | gcc_unreachable (); | |
110cfe1c | 1725 | } |
f618d33e | 1726 | |
9b2a5ef7 RH |
1727 | /* Constant propagation on arguments done during inlining | |
1728 | may create a new direct call.  Produce an edge for it. */ | |
b8698a0f | 1729 | if ((!edge |
e33c6cd6 | 1730 | || (edge->indirect_inlining_edge |
9b2a5ef7 | 1731 | && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)) |
9b2a5ef7 RH |
1732 | && (fn = gimple_call_fndecl (stmt)) != NULL) |
1733 | { | |
1734 | struct cgraph_node *dest = cgraph_node (fn); | |
1735 | ||
1736 | /* We have a missing edge in the callgraph.  This can happen | |
1737 | when previous inlining turned an indirect call into a | |
0e3776db | 1738 | direct call by constant propagating arguments, or when we are |
20a6bb58 | 1739 | producing a dead clone (for further cloning).  In all |
9b2a5ef7 RH |
1740 | other cases we hit a bug (incorrect node sharing is the |
1741 | most common reason for missing edges). */ | |
0e3776db | 1742 | gcc_assert (dest->needed || !dest->analyzed |
bd3cdcc0 | 1743 | || dest->address_taken |
0cac82a0 JH |
1744 | || !id->src_node->analyzed |
1745 | || !id->dst_node->analyzed); | |
9b2a5ef7 RH |
1746 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES) |
1747 | cgraph_create_edge_including_clones | |
47cb0d7d | 1748 | (id->dst_node, dest, orig_stmt, stmt, bb->count, |
b8698a0f | 1749 | compute_call_stmt_bb_frequency (id->dst_node->decl, |
0d63a740 | 1750 | copy_basic_block), |
9b2a5ef7 RH |
1751 | bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL); |
1752 | else | |
1753 | cgraph_create_edge (id->dst_node, dest, stmt, | |
47cb0d7d JH |
1754 | bb->count, |
1755 | compute_call_stmt_bb_frequency | |
1756 | (id->dst_node->decl, copy_basic_block), | |
9b2a5ef7 RH |
1757 | bb->loop_depth)->inline_failed |
1758 | = CIF_ORIGINALLY_INDIRECT_CALL; | |
1759 | if (dump_file) | |
1760 | { | |
91382288 | 1761 | fprintf (dump_file, "Created new direct edge to %s\n", |
9b2a5ef7 RH |
1762 | cgraph_node_name (dest)); |
1763 | } | |
1764 | } | |
9187e02d | 1765 | |
f618d33e | 1766 | flags = gimple_call_flags (stmt); |
f618d33e MJ |
1767 | if (flags & ECF_MAY_BE_ALLOCA) |
1768 | cfun->calls_alloca = true; | |
1769 | if (flags & ECF_RETURNS_TWICE) | |
1770 | cfun->calls_setjmp = true; | |
726a989a | 1771 | } |
e21aff8a | 1772 | |
1d65f45c RH |
1773 | maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt, |
1774 | id->eh_map, id->eh_lp_nr); | |
726a989a | 1775 | |
b5b8b0ac | 1776 | if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt)) |
726a989a RB |
1777 | { |
1778 | ssa_op_iter i; | |
1779 | tree def; | |
1780 | ||
1781 | find_new_referenced_vars (gsi_stmt (copy_gsi)); | |
1782 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF) | |
1783 | if (TREE_CODE (def) == SSA_NAME) | |
1784 | SSA_NAME_DEF_STMT (def) = stmt; | |
1785 | } | |
1786 | ||
1787 | gsi_next (&copy_gsi); | |
e21aff8a | 1788 | } |
c2a4718a | 1789 | while (!gsi_end_p (copy_gsi)); |
726a989a RB |
1790 | |
1791 | copy_gsi = gsi_last_bb (copy_basic_block); | |
e21aff8a | 1792 | } |
726a989a | 1793 | |
e21aff8a SB |
1794 | return copy_basic_block; |
1795 | } | |
1796 | ||
110cfe1c JH |
1797 | /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA | |
1798 | form is quite easy, since the dominator relationship for the old basic blocks | |
1799 | does not change. | |
1800 | ||
1801 | There is, however, an exception: inlining might change the dominator relation | |
1802 | across EH edges from basic blocks within the inlined function that lead | |
5305a4cb | 1803 | to landing pads in the function we inline into. |
110cfe1c | 1804 | |
e9705dc5 AO |
1805 | The function fills in the PHI_RESULTs of such PHI nodes if they refer | |
1806 | to gimple regs.  Otherwise, the function marks the PHI_RESULT of such | |
1807 | PHI nodes for renaming.  For non-gimple regs, renaming is safe: the | |
1808 | EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be | |
1809 | set, and this means that there will be no overlapping live ranges | |
110cfe1c JH |
1810 | for the underlying symbol. | |
1811 | ||
1812 | This might change in the future if we allow redirecting of EH edges, and | |
1813 | then we might want to change the way we build the CFG pre-inlining to | |
1814 | include all the possible edges. */ | |
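/* A sketch of the mechanism below (assumed CFG): if a copied block
   gains an EH edge E to a landing pad in the caller, and that landing
   pad also receives an edge RE from RET_BB, the PHI argument on E is
   copied from the argument already present on RE.  */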
1815 | static void | |
e9705dc5 AO |
1816 | update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb, |
1817 | bool can_throw, bool nonlocal_goto) | |
110cfe1c JH |
1818 | { |
1819 | edge e; | |
1820 | edge_iterator ei; | |
1821 | ||
1822 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1823 | if (!e->dest->aux | |
1824 | || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK) | |
1825 | { | |
726a989a RB |
1826 | gimple phi; |
1827 | gimple_stmt_iterator si; | |
110cfe1c | 1828 | |
e9705dc5 AO |
1829 | if (!nonlocal_goto) |
1830 | gcc_assert (e->flags & EDGE_EH); | |
726a989a | 1831 | |
e9705dc5 AO |
1832 | if (!can_throw) |
1833 | gcc_assert (!(e->flags & EDGE_EH)); | |
726a989a RB |
1834 | |
1835 | for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si)) | |
110cfe1c | 1836 | { |
e9705dc5 AO |
1837 | edge re; |
1838 | ||
726a989a RB |
1839 | phi = gsi_stmt (si); |
1840 | ||
e9705dc5 AO |
1841 | /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */ |
1842 | gcc_assert (!e->dest->aux); | |
1843 | ||
496a4ef5 JH |
1844 | gcc_assert ((e->flags & EDGE_EH) |
1845 | || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi))); | |
e9705dc5 AO |
1846 | |
1847 | if (!is_gimple_reg (PHI_RESULT (phi))) | |
1848 | { | |
726a989a | 1849 | mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi))); |
e9705dc5 AO |
1850 | continue; |
1851 | } | |
1852 | ||
1853 | re = find_edge (ret_bb, e->dest); | |
1432b19f | 1854 | gcc_assert (re); |
e9705dc5 AO |
1855 | gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL)) |
1856 | == (e->flags & (EDGE_EH | EDGE_ABNORMAL))); | |
1857 | ||
1858 | SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), | |
1859 | USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re))); | |
110cfe1c JH |
1860 | } |
1861 | } | |
1862 | } | |
1863 | ||
726a989a | 1864 | |
128a79fb KH |
1865 | /* Copy edges from BB into its copy constructed earlier, scale profile | |
1866 | accordingly.  Edges will be taken care of later.  Assume aux | |
90a7788b JJ |
1867 | pointers point to the copies of each BB.  Return true if any | |
1868 | debug stmts are left after a statement that must end the basic block. */ | |
726a989a | 1869 | |
90a7788b | 1870 | static bool |
0178d644 | 1871 | copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb) |
e21aff8a | 1872 | { |
cceb1885 | 1873 | basic_block new_bb = (basic_block) bb->aux; |
e21aff8a SB |
1874 | edge_iterator ei; |
1875 | edge old_edge; | |
726a989a | 1876 | gimple_stmt_iterator si; |
e21aff8a | 1877 | int flags; |
90a7788b | 1878 | bool need_debug_cleanup = false; |
e21aff8a SB |
1879 | |
1880 | /* Use the indices from the original blocks to create edges for the | |
1881 | new ones. */ | |
1882 | FOR_EACH_EDGE (old_edge, ei, bb->succs) | |
e0704a46 JH |
1883 | if (!(old_edge->flags & EDGE_EH)) |
1884 | { | |
82d6e6fc | 1885 | edge new_edge; |
e21aff8a | 1886 | |
e0704a46 | 1887 | flags = old_edge->flags; |
e21aff8a | 1888 | |
e0704a46 JH |
1889 | /* Return edges do get a FALLTHRU flag when they get inlined. */ | |
1890 | if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags | |
1891 | && old_edge->dest->aux != EXIT_BLOCK_PTR) | |
1892 | flags |= EDGE_FALLTHRU; | |
82d6e6fc KG |
1893 | new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags); |
1894 | new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE; | |
1895 | new_edge->probability = old_edge->probability; | |
e0704a46 | 1896 | } |
e21aff8a SB |
1897 | |
1898 | if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK) | |
90a7788b | 1899 | return false; |
e21aff8a | 1900 | |
726a989a | 1901 | for (si = gsi_start_bb (new_bb); !gsi_end_p (si);) |
e21aff8a | 1902 | { |
726a989a | 1903 | gimple copy_stmt; |
e9705dc5 | 1904 | bool can_throw, nonlocal_goto; |
e21aff8a | 1905 | |
726a989a | 1906 | copy_stmt = gsi_stmt (si); |
b5b8b0ac AO |
1907 | if (!is_gimple_debug (copy_stmt)) |
1908 | { | |
1909 | update_stmt (copy_stmt); | |
1910 | if (gimple_in_ssa_p (cfun)) | |
1911 | mark_symbols_for_renaming (copy_stmt); | |
1912 | } | |
726a989a | 1913 | |
e21aff8a | 1914 | /* Do this before the possible split_block. */ |
726a989a | 1915 | gsi_next (&si); |
e21aff8a SB |
1916 | |
1917 | /* If this tree could throw an exception, there are two | |
1918 | cases where we need to add abnormal edge(s): the | |
1919 | tree wasn't in a region and there is a "current | |
1920 | region" in the caller; or the original tree had | |
1921 | EH edges. In both cases split the block after the tree, | |
1922 | and add abnormal edge(s) as needed; we need both | |
1923 | those from the callee and the caller. | |
1924 | We check whether the copy can throw, because the const | |
1925 | propagation can change an INDIRECT_REF which throws | |
1926 | into a COMPONENT_REF which doesn't. If the copy | |
1927 | can throw, the original could also throw. */ | |
726a989a RB |
1928 | can_throw = stmt_can_throw_internal (copy_stmt); |
1929 | nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt); | |
e9705dc5 AO |
1930 | |
1931 | if (can_throw || nonlocal_goto) | |
e21aff8a | 1932 | { |
90a7788b JJ |
1933 | if (!gsi_end_p (si)) |
1934 | { | |
1935 | while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si))) | |
1936 | gsi_next (&si); | |
1937 | if (gsi_end_p (si)) | |
1938 | need_debug_cleanup = true; | |
1939 | } | |
726a989a | 1940 | if (!gsi_end_p (si)) |
e21aff8a SB |
1941 | /* Note that bb's predecessor edges aren't necessarily |
1942 | right at this point; split_block doesn't care. */ | |
1943 | { | |
1944 | edge e = split_block (new_bb, copy_stmt); | |
110cfe1c | 1945 | |
e21aff8a | 1946 | new_bb = e->dest; |
110cfe1c | 1947 | new_bb->aux = e->src->aux; |
726a989a | 1948 | si = gsi_start_bb (new_bb); |
e21aff8a | 1949 | } |
e9705dc5 | 1950 | } |
e21aff8a | 1951 | |
1d65f45c RH |
1952 | if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH) |
1953 | make_eh_dispatch_edges (copy_stmt); | |
1954 | else if (can_throw) | |
e9705dc5 | 1955 | make_eh_edges (copy_stmt); |
110cfe1c | 1956 | |
e9705dc5 | 1957 | if (nonlocal_goto) |
726a989a | 1958 | make_abnormal_goto_edges (gimple_bb (copy_stmt), true); |
e9705dc5 AO |
1959 | |
1960 | if ((can_throw || nonlocal_goto) | |
1961 | && gimple_in_ssa_p (cfun)) | |
726a989a | 1962 | update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb, |
e9705dc5 | 1963 | can_throw, nonlocal_goto); |
110cfe1c | 1964 | } |
90a7788b | 1965 | return need_debug_cleanup; |
110cfe1c JH |
1966 | } |
1967 | ||
1968 | /* Copy the PHIs.  All blocks and edges are copied; some blocks | |
1969 | may have been split and new outgoing EH edges inserted. | |
1970 | BB points to the block of the original function, and AUX pointers link | |
1971 | the original and newly copied blocks. */ | |
1972 | ||
1973 | static void | |
1974 | copy_phis_for_bb (basic_block bb, copy_body_data *id) | |
1975 | { | |
3d9a9f94 | 1976 | basic_block const new_bb = (basic_block) bb->aux; |
110cfe1c | 1977 | edge_iterator ei; |
726a989a RB |
1978 | gimple phi; |
1979 | gimple_stmt_iterator si; | |
6a78fd06 RG |
1980 | edge new_edge; |
1981 | bool inserted = false; | |
110cfe1c | 1982 | |
726a989a | 1983 | for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si)) |
110cfe1c | 1984 | { |
726a989a RB |
1985 | tree res, new_res; |
1986 | gimple new_phi; | |
110cfe1c | 1987 | |
726a989a RB |
1988 | phi = gsi_stmt (si); |
1989 | res = PHI_RESULT (phi); | |
1990 | new_res = res; | |
110cfe1c JH |
1991 | if (is_gimple_reg (res)) |
1992 | { | |
726a989a | 1993 | walk_tree (&new_res, copy_tree_body_r, id, NULL); |
110cfe1c JH |
1994 | SSA_NAME_DEF_STMT (new_res) |
1995 | = new_phi = create_phi_node (new_res, new_bb); | |
1996 | FOR_EACH_EDGE (new_edge, ei, new_bb->preds) | |
1997 | { | |
8b3057b3 JH |
1998 | edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb); |
1999 | tree arg; | |
2000 | tree new_arg; | |
726a989a | 2001 | tree block = id->block; |
8b3057b3 JH |
2002 | edge_iterator ei2; |
2003 | ||
20a6bb58 | 2004 | /* When doing partial cloning, we allow PHIs on the entry block |
8b3057b3 JH |
2005 | as long as all the arguments are the same. Find any input |
2006 | edge to see the argument to copy. */ | |
2007 | if (!old_edge) | |
2008 | FOR_EACH_EDGE (old_edge, ei2, bb->preds) | |
2009 | if (!old_edge->src->aux) | |
2010 | break; | |
2011 | ||
2012 | arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge); | |
2013 | new_arg = arg; | |
726a989a RB |
2014 | id->block = NULL_TREE; |
2015 | walk_tree (&new_arg, copy_tree_body_r, id, NULL); | |
2016 | id->block = block; | |
110cfe1c | 2017 | gcc_assert (new_arg); |
36b6e793 JJ |
2018 | /* With return slot optimization we can end up with |
2019 | non-gimple (foo *)&this->m, fix that here. */ | |
2020 | if (TREE_CODE (new_arg) != SSA_NAME | |
2021 | && TREE_CODE (new_arg) != FUNCTION_DECL | |
2022 | && !is_gimple_val (new_arg)) | |
2023 | { | |
726a989a RB |
2024 | gimple_seq stmts = NULL; |
2025 | new_arg = force_gimple_operand (new_arg, &stmts, true, NULL); | |
6a78fd06 RG |
2026 | gsi_insert_seq_on_edge (new_edge, stmts); |
2027 | inserted = true; | |
36b6e793 | 2028 | } |
b8698a0f | 2029 | add_phi_arg (new_phi, new_arg, new_edge, |
f5045c96 | 2030 | gimple_phi_arg_location_from_edge (phi, old_edge)); |
110cfe1c | 2031 | } |
e21aff8a SB |
2032 | } |
2033 | } | |
6a78fd06 RG |
2034 | |
2035 | /* Commit the delayed edge insertions. */ | |
2036 | if (inserted) | |
2037 | FOR_EACH_EDGE (new_edge, ei, new_bb->preds) | |
2038 | gsi_commit_one_edge_insert (new_edge, NULL); | |
e21aff8a SB |
2039 | } |
2040 | ||
726a989a | 2041 | |
e21aff8a | 2042 | /* Wrapper for remap_decl so it can be used as a callback. */ |
726a989a | 2043 | |
e21aff8a SB |
2044 | static tree |
2045 | remap_decl_1 (tree decl, void *data) | |
2046 | { | |
1b369fae | 2047 | return remap_decl (decl, (copy_body_data *) data); |
e21aff8a SB |
2048 | } |
2049 | ||
110cfe1c JH |
2050 | /* Build the struct function and associated data structures for the new clone | |
2051 | NEW_FNDECL to be built.  CALLEE_FNDECL is the original. */ | |
2052 | ||
2053 | static void | |
0d63a740 | 2054 | initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count) |
110cfe1c | 2055 | { |
110cfe1c | 2056 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
0d63a740 | 2057 | gcov_type count_scale; |
110cfe1c JH |
2058 | |
2059 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) | |
2060 | count_scale = (REG_BR_PROB_BASE * count | |
2061 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); | |
2062 | else | |
0d63a740 | 2063 | count_scale = REG_BR_PROB_BASE; |
110cfe1c JH |
2064 | |
2065 | /* Register specific tree functions. */ | |
726a989a | 2066 | gimple_register_cfg_hooks (); |
39ecc018 JH |
2067 | |
2068 | /* Get clean struct function. */ | |
2069 | push_struct_function (new_fndecl); | |
2070 | ||
2071 | /* We will rebuild these, so just sanity check that they are empty. */ | |
2072 | gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL); | |
2073 | gcc_assert (cfun->local_decls == NULL); | |
2074 | gcc_assert (cfun->cfg == NULL); | |
2075 | gcc_assert (cfun->decl == new_fndecl); | |
2076 | ||
20a6bb58 | 2077 | /* Copy items we preserve during cloning. */ |
39ecc018 JH |
2078 | cfun->static_chain_decl = src_cfun->static_chain_decl; |
2079 | cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area; | |
2080 | cfun->function_end_locus = src_cfun->function_end_locus; | |
2081 | cfun->curr_properties = src_cfun->curr_properties; | |
2082 | cfun->last_verified = src_cfun->last_verified; | |
39ecc018 JH |
2083 | cfun->va_list_gpr_size = src_cfun->va_list_gpr_size; |
2084 | cfun->va_list_fpr_size = src_cfun->va_list_fpr_size; | |
39ecc018 JH |
2085 | cfun->has_nonlocal_label = src_cfun->has_nonlocal_label; |
2086 | cfun->stdarg = src_cfun->stdarg; | |
2087 | cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p; | |
2088 | cfun->after_inlining = src_cfun->after_inlining; | |
8f4f502f EB |
2089 | cfun->can_throw_non_call_exceptions |
2090 | = src_cfun->can_throw_non_call_exceptions; | |
39ecc018 JH |
2091 | cfun->returns_struct = src_cfun->returns_struct; |
2092 | cfun->returns_pcc_struct = src_cfun->returns_pcc_struct; | |
2093 | cfun->after_tree_profile = src_cfun->after_tree_profile; | |
2094 | ||
110cfe1c JH |
2095 | init_empty_tree_cfg (); |
2096 | ||
0d63a740 | 2097 | profile_status_for_function (cfun) = profile_status_for_function (src_cfun); |
110cfe1c JH |
2098 | ENTRY_BLOCK_PTR->count = |
2099 | (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2100 | REG_BR_PROB_BASE); | |
0d63a740 JH |
2101 | ENTRY_BLOCK_PTR->frequency |
2102 | = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency; | |
110cfe1c JH |
2103 | EXIT_BLOCK_PTR->count = |
2104 | (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
2105 | REG_BR_PROB_BASE); | |
2106 | EXIT_BLOCK_PTR->frequency = | |
0d63a740 | 2107 | EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency; |
110cfe1c JH |
2108 | if (src_cfun->eh) |
2109 | init_eh_for_function (); | |
2110 | ||
2111 | if (src_cfun->gimple_df) | |
2112 | { | |
5db9ba0c | 2113 | init_tree_ssa (cfun); |
110cfe1c JH |
2114 | cfun->gimple_df->in_ssa_p = true; |
2115 | init_ssa_operands (); | |
2116 | } | |
2117 | pop_cfun (); | |
2118 | } | |
2119 | ||
90a7788b JJ |
2120 | /* Helper function for copy_cfg_body. Move debug stmts from the end |
2121 | of NEW_BB to the beginning of successor basic blocks when needed. If the | |
2122 | successor has multiple predecessors, reset them; otherwise keep | |
2123 | their value. */ | |
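/* For instance (hypothetical stmts): if NEW_BB ends with a throwing
   call followed by # DEBUG x => x_1, the bind must move into the
   successors; in a successor with several predecessors the value is
   reset to # DEBUG x => NULL, since x_1 need not be correct on every
   incoming path.  */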
2124 | ||
2125 | static void | |
2126 | maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb) | |
2127 | { | |
2128 | edge e; | |
2129 | edge_iterator ei; | |
2130 | gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb); | |
2131 | ||
2132 | if (gsi_end_p (si) | |
2133 | || gsi_one_before_end_p (si) | |
2134 | || !(stmt_can_throw_internal (gsi_stmt (si)) | |
2135 | || stmt_can_make_abnormal_goto (gsi_stmt (si)))) | |
2136 | return; | |
2137 | ||
2138 | FOR_EACH_EDGE (e, ei, new_bb->succs) | |
2139 | { | |
2140 | gimple_stmt_iterator ssi = gsi_last_bb (new_bb); | |
2141 | gimple_stmt_iterator dsi = gsi_after_labels (e->dest); | |
2142 | while (is_gimple_debug (gsi_stmt (ssi))) | |
2143 | { | |
2144 | gimple stmt = gsi_stmt (ssi), new_stmt; | |
2145 | tree var; | |
2146 | tree value; | |
2147 | ||
2148 | /* For the last edge move the debug stmts instead of copying | |
2149 | them. */ | |
2150 | if (ei_one_before_end_p (ei)) | |
2151 | { | |
2152 | si = ssi; | |
2153 | gsi_prev (&ssi); | |
2154 | if (!single_pred_p (e->dest)) | |
2155 | gimple_debug_bind_reset_value (stmt); | |
2156 | gsi_remove (&si, false); | |
2157 | gsi_insert_before (&dsi, stmt, GSI_SAME_STMT); | |
2158 | continue; | |
2159 | } | |
2160 | ||
2161 | var = gimple_debug_bind_get_var (stmt); | |
2162 | if (single_pred_p (e->dest)) | |
2163 | { | |
2164 | value = gimple_debug_bind_get_value (stmt); | |
2165 | value = unshare_expr (value); | |
2166 | } | |
2167 | else | |
2168 | value = NULL_TREE; | |
2169 | new_stmt = gimple_build_debug_bind (var, value, stmt); | |
2170 | gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT); | |
2171 | VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt); | |
2172 | gsi_prev (&ssi); | |
2173 | } | |
2174 | } | |
2175 | } | |
2176 | ||
e21aff8a SB |
2177 | /* Make a copy of the body of FN so that it can be inserted inline in |
2178 | another function. Walks FN via CFG, returns new fndecl. */ | |
2179 | ||
2180 | static tree | |
0d63a740 | 2181 | copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale, |
91382288 JH |
2182 | basic_block entry_block_map, basic_block exit_block_map, |
2183 | bitmap blocks_to_copy, basic_block new_entry) | |
e21aff8a | 2184 | { |
1b369fae | 2185 | tree callee_fndecl = id->src_fn; |
e21aff8a | 2186 | /* Original cfun for the callee, doesn't change. */ |
1b369fae | 2187 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
110cfe1c | 2188 | struct function *cfun_to_copy; |
e21aff8a SB |
2189 | basic_block bb; |
2190 | tree new_fndecl = NULL; | |
90a7788b | 2191 | bool need_debug_cleanup = false; |
0d63a740 | 2192 | gcov_type count_scale; |
110cfe1c | 2193 | int last; |
20a6bb58 JH |
2194 | int incoming_frequency = 0; |
2195 | gcov_type incoming_count = 0; | |
e21aff8a | 2196 | |
1b369fae | 2197 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) |
e21aff8a | 2198 | count_scale = (REG_BR_PROB_BASE * count |
1b369fae | 2199 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); |
e21aff8a | 2200 | else |
0d63a740 | 2201 | count_scale = REG_BR_PROB_BASE; |
e21aff8a SB |
2202 | |
2203 | /* Register specific tree functions. */ | |
726a989a | 2204 | gimple_register_cfg_hooks (); |
e21aff8a | 2205 | |
b35366ce JH |
2206 | /* If we are inlining just a region of the function, make sure to connect | |
2207 | the new entry to ENTRY_BLOCK_PTR.  Since the new entry can be part of a loop, | |
2208 | we must compute the frequency and probability of ENTRY_BLOCK_PTR based on | |
20a6bb58 | 2209 | the frequencies and probabilities of edges incoming from the nonduplicated region. */ |
b35366ce JH |
2210 | if (new_entry) |
2211 | { | |
2212 | edge e; | |
2213 | edge_iterator ei; | |
2214 | ||
2215 | FOR_EACH_EDGE (e, ei, new_entry->preds) | |
2216 | if (!e->src->aux) | |
2217 | { | |
20a6bb58 JH |
2218 | incoming_frequency += EDGE_FREQUENCY (e); |
2219 | incoming_count += e->count; | |
b35366ce | 2220 | } |
20a6bb58 JH |
2221 | incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE; |
2222 | incoming_frequency | |
2223 | = incoming_frequency * frequency_scale / REG_BR_PROB_BASE; | |
2224 | ENTRY_BLOCK_PTR->count = incoming_count; | |
2225 | ENTRY_BLOCK_PTR->frequency = incoming_frequency; | |
b35366ce JH |
2226 | } |
2227 | ||
e21aff8a SB |
2228 | /* Must have a CFG here at this point. */ |
2229 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION | |
2230 | (DECL_STRUCT_FUNCTION (callee_fndecl))); | |
2231 | ||
110cfe1c JH |
2232 | cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
2233 | ||
e21aff8a SB |
2234 | ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map; |
2235 | EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map; | |
110cfe1c JH |
2236 | entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); |
2237 | exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); | |
e21aff8a | 2238 | |
e21aff8a SB |
2239 | /* Duplicate any exception-handling regions. */ |
2240 | if (cfun->eh) | |
1d65f45c RH |
2241 | id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr, |
2242 | remap_decl_1, id); | |
726a989a | 2243 | |
e21aff8a SB |
2244 | /* Use aux pointers to map the original blocks to copy. */ |
2245 | FOR_EACH_BB_FN (bb, cfun_to_copy) | |
91382288 JH |
2246 | if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index)) |
2247 | { | |
2248 | basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale); | |
2249 | bb->aux = new_bb; | |
2250 | new_bb->aux = bb; | |
2251 | } | |
110cfe1c | 2252 | |
7c57be85 | 2253 | last = last_basic_block; |
726a989a | 2254 | |
e21aff8a SB |
2255 | /* Now that we've duplicated the blocks, duplicate their edges. */ |
2256 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
91382288 JH |
2257 | if (!blocks_to_copy |
2258 | || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index))) | |
2259 | need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map); | |
726a989a | 2260 | |
91382288 | 2261 | if (new_entry) |
110cfe1c | 2262 | { |
b35366ce | 2263 | edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU); |
91382288 | 2264 | e->probability = REG_BR_PROB_BASE; |
20a6bb58 | 2265 | e->count = incoming_count; |
110cfe1c | 2266 | } |
726a989a | 2267 | |
8b3057b3 JH |
2268 | if (gimple_in_ssa_p (cfun)) |
2269 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
2270 | if (!blocks_to_copy | |
2271 | || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index))) | |
2272 | copy_phis_for_bb (bb, id); | |
2273 | ||
91382288 JH |
2274 | FOR_ALL_BB_FN (bb, cfun_to_copy) |
2275 | if (bb->aux) | |
2276 | { | |
2277 | if (need_debug_cleanup | |
2278 | && bb->index != ENTRY_BLOCK | |
2279 | && bb->index != EXIT_BLOCK) | |
2280 | maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux); | |
2281 | ((basic_block)bb->aux)->aux = NULL; | |
2282 | bb->aux = NULL; | |
2283 | } | |
2284 | ||
110cfe1c JH |
2285 | /* Zero out the AUX fields of the blocks newly created during EH edge | |
2286 | insertion. */ | |
7c57be85 | 2287 | for (; last < last_basic_block; last++) |
90a7788b JJ |
2288 | { |
2289 | if (need_debug_cleanup) | |
2290 | maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last)); | |
2291 | BASIC_BLOCK (last)->aux = NULL; | |
2292 | } | |
110cfe1c JH |
2293 | entry_block_map->aux = NULL; |
2294 | exit_block_map->aux = NULL; | |
e21aff8a | 2295 | |
1d65f45c RH |
2296 | if (id->eh_map) |
2297 | { | |
2298 | pointer_map_destroy (id->eh_map); | |
2299 | id->eh_map = NULL; | |
2300 | } | |
2301 | ||
e21aff8a SB |
2302 | return new_fndecl; |
2303 | } | |
2304 | ||
b5b8b0ac AO |
2305 | /* Copy the debug STMT using ID. We deal with these statements in a |
2306 | special way: if any variable in their VALUE expression wasn't | |
2307 | remapped yet, we won't remap it, because that would get decl uids | |
2308 | out of sync, causing codegen differences between -g and -g0. If | |
2309 | this arises, we drop the VALUE expression altogether. */ | |
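/* E.g. (illustrative): a # DEBUG y => x + 1 whose variable x was
   never remapped becomes # DEBUG y => NULL instead of forcing a
   remap of x here.  */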
2310 | ||
2311 | static void | |
2312 | copy_debug_stmt (gimple stmt, copy_body_data *id) | |
2313 | { | |
2314 | tree t, *n; | |
2315 | struct walk_stmt_info wi; | |
2316 | ||
2317 | t = id->block; | |
2318 | if (gimple_block (stmt)) | |
2319 | { | |
2320 | tree *n; | |
2321 | n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt)); | |
2322 | if (n) | |
2323 | t = *n; | |
2324 | } | |
2325 | gimple_set_block (stmt, t); | |
2326 | ||
2327 | /* Remap all the operands in COPY. */ | |
2328 | memset (&wi, 0, sizeof (wi)); | |
2329 | wi.info = id; | |
2330 | ||
2331 | processing_debug_stmt = 1; | |
2332 | ||
2333 | t = gimple_debug_bind_get_var (stmt); | |
2334 | ||
2335 | if (TREE_CODE (t) == PARM_DECL && id->debug_map | |
2336 | && (n = (tree *) pointer_map_contains (id->debug_map, t))) | |
2337 | { | |
2338 | gcc_assert (TREE_CODE (*n) == VAR_DECL); | |
2339 | t = *n; | |
2340 | } | |
d17af147 JJ |
2341 | else if (TREE_CODE (t) == VAR_DECL |
2342 | && !TREE_STATIC (t) | |
2343 | && gimple_in_ssa_p (cfun) | |
2344 | && !pointer_map_contains (id->decl_map, t) | |
2345 | && !var_ann (t)) | |
2346 | /* T is a non-localized variable. */; | |
b5b8b0ac AO |
2347 | else |
2348 | walk_tree (&t, remap_gimple_op_r, &wi, NULL); | |
2349 | ||
2350 | gimple_debug_bind_set_var (stmt, t); | |
2351 | ||
2352 | if (gimple_debug_bind_has_value_p (stmt)) | |
2353 | walk_tree (gimple_debug_bind_get_value_ptr (stmt), | |
2354 | remap_gimple_op_r, &wi, NULL); | |
2355 | ||
2356 | /* Punt if any decl couldn't be remapped. */ | |
2357 | if (processing_debug_stmt < 0) | |
2358 | gimple_debug_bind_reset_value (stmt); | |
2359 | ||
2360 | processing_debug_stmt = 0; | |
2361 | ||
2362 | update_stmt (stmt); | |
2363 | if (gimple_in_ssa_p (cfun)) | |
2364 | mark_symbols_for_renaming (stmt); | |
2365 | } | |
2366 | ||
2367 | /* Process deferred debug stmts. In order to give values better odds | |
2368 | of being successfully remapped, we delay the processing of debug | |
2369 | stmts until all other stmts that might require remapping are | |
2370 | processed. */ | |
2371 | ||
2372 | static void | |
2373 | copy_debug_stmts (copy_body_data *id) | |
2374 | { | |
2375 | size_t i; | |
2376 | gimple stmt; | |
2377 | ||
2378 | if (!id->debug_stmts) | |
2379 | return; | |
2380 | ||
ac47786e | 2381 | FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt) |
b5b8b0ac AO |
2382 | copy_debug_stmt (stmt, id); |
2383 | ||
2384 | VEC_free (gimple, heap, id->debug_stmts); | |
2385 | } | |
2386 | ||
f82a627c EB |
2387 | /* Make a copy of the body of SRC_FN so that it can be inserted inline in |
2388 | another function. */ | |
2389 | ||
2390 | static tree | |
2391 | copy_tree_body (copy_body_data *id) | |
2392 | { | |
2393 | tree fndecl = id->src_fn; | |
2394 | tree body = DECL_SAVED_TREE (fndecl); | |
2395 | ||
2396 | walk_tree (&body, copy_tree_body_r, id, NULL); | |
2397 | ||
2398 | return body; | |
2399 | } | |
2400 | ||
b5b8b0ac AO |
2401 | /* Make a copy of the body of FN so that it can be inserted inline in |
2402 | another function. */ | |
2403 | ||
e21aff8a | 2404 | static tree |
0d63a740 | 2405 | copy_body (copy_body_data *id, gcov_type count, int frequency_scale, |
91382288 JH |
2406 | basic_block entry_block_map, basic_block exit_block_map, |
2407 | bitmap blocks_to_copy, basic_block new_entry) | |
e21aff8a | 2408 | { |
1b369fae | 2409 | tree fndecl = id->src_fn; |
e21aff8a SB |
2410 | tree body; |
2411 | ||
2412 | /* If this body has a CFG, walk CFG and copy. */ | |
2413 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl))); | |
91382288 JH |
2414 | body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map, |
2415 | blocks_to_copy, new_entry); | |
b5b8b0ac | 2416 | copy_debug_stmts (id); |
e21aff8a SB |
2417 | |
2418 | return body; | |
2419 | } | |
2420 | ||
04482133 AO |
2421 | /* Return true if VALUE is an ADDR_EXPR of an automatic variable |
2422 | defined in function FN, or of a data member thereof. */ | |
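/* E.g. (illustrative): for an automatic struct { int i; } s declared
   in FN, both &s and &s.i satisfy this predicate, while the address
   of a global does not.  */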
2423 | ||
2424 | static bool | |
2425 | self_inlining_addr_expr (tree value, tree fn) | |
2426 | { | |
2427 | tree var; | |
2428 | ||
2429 | if (TREE_CODE (value) != ADDR_EXPR) | |
2430 | return false; | |
2431 | ||
2432 | var = get_base_address (TREE_OPERAND (value, 0)); | |
e21aff8a | 2433 | |
50886bf1 | 2434 | return var && auto_var_in_fn_p (var, fn); |
04482133 AO |
2435 | } |
2436 | ||
b5b8b0ac AO |
2437 | /* Append to BB a debug annotation that binds VAR to VALUE, inheriting |
2438 | lexical block and line number information from base_stmt, if given, | |
2439 | or from the last stmt of the block otherwise. */ | |
2440 | ||
2441 | static gimple | |
2442 | insert_init_debug_bind (copy_body_data *id, | |
2443 | basic_block bb, tree var, tree value, | |
2444 | gimple base_stmt) | |
2445 | { | |
2446 | gimple note; | |
2447 | gimple_stmt_iterator gsi; | |
2448 | tree tracked_var; | |
2449 | ||
2450 | if (!gimple_in_ssa_p (id->src_cfun)) | |
2451 | return NULL; | |
2452 | ||
2453 | if (!MAY_HAVE_DEBUG_STMTS) | |
2454 | return NULL; | |
2455 | ||
2456 | tracked_var = target_for_debug_bind (var); | |
2457 | if (!tracked_var) | |
2458 | return NULL; | |
2459 | ||
2460 | if (bb) | |
2461 | { | |
2462 | gsi = gsi_last_bb (bb); | |
2463 | if (!base_stmt && !gsi_end_p (gsi)) | |
2464 | base_stmt = gsi_stmt (gsi); | |
2465 | } | |
2466 | ||
2467 | note = gimple_build_debug_bind (tracked_var, value, base_stmt); | |
2468 | ||
2469 | if (bb) | |
2470 | { | |
2471 | if (!gsi_end_p (gsi)) | |
2472 | gsi_insert_after (&gsi, note, GSI_SAME_STMT); | |
2473 | else | |
2474 | gsi_insert_before (&gsi, note, GSI_SAME_STMT); | |
2475 | } | |
2476 | ||
2477 | return note; | |
2478 | } | |
2479 | ||
6de9cd9a | 2480 | static void |
b5b8b0ac | 2481 | insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt) |
0f1961a2 | 2482 | { |
0f1961a2 JH |
2483 | /* If VAR represents a zero-sized variable, it's possible that the |
2484 | assignment statement may result in no gimple statements. */ | |
2485 | if (init_stmt) | |
c2a4718a JJ |
2486 | { |
2487 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
0f1961a2 | 2488 | |
bfb0b886 RG |
2489 | /* We can end up with init statements that store to a non-register |
2490 | from a rhs with a conversion. Handle that here by forcing the | |
2491 | rhs into a temporary. gimple_regimplify_operands is not | |
2492 | prepared to do this for us. */ | |
b5b8b0ac AO |
2493 | if (!is_gimple_debug (init_stmt) |
2494 | && !is_gimple_reg (gimple_assign_lhs (init_stmt)) | |
bfb0b886 RG |
2495 | && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt))) |
2496 | && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS) | |
2497 | { | |
2498 | tree rhs = build1 (gimple_assign_rhs_code (init_stmt), | |
2499 | gimple_expr_type (init_stmt), | |
2500 | gimple_assign_rhs1 (init_stmt)); | |
2501 | rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false, | |
2502 | GSI_NEW_STMT); | |
2503 | gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs)); | |
2504 | gimple_assign_set_rhs1 (init_stmt, rhs); | |
2505 | } | |
c2a4718a JJ |
2506 | gsi_insert_after (&si, init_stmt, GSI_NEW_STMT); |
2507 | gimple_regimplify_operands (init_stmt, &si); | |
2508 | mark_symbols_for_renaming (init_stmt); | |
b5b8b0ac AO |
2509 | |
2510 | if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS) | |
2511 | { | |
2512 | tree var, def = gimple_assign_lhs (init_stmt); | |
2513 | ||
2514 | if (TREE_CODE (def) == SSA_NAME) | |
2515 | var = SSA_NAME_VAR (def); | |
2516 | else | |
2517 | var = def; | |
2518 | ||
2519 | insert_init_debug_bind (id, bb, var, def, init_stmt); | |
2520 | } | |
c2a4718a | 2521 | } |
0f1961a2 JH |
2522 | } |
2523 | ||
2524 | /* Initialize parameter P with VALUE.  If needed, produce an init statement | |
2525 | at the end of BB.  When BB is NULL, we return the init statement to be | |
2526 | output later. */ | |
2527 | static gimple | |
1b369fae | 2528 | setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn, |
e21aff8a | 2529 | basic_block bb, tree *vars) |
6de9cd9a | 2530 | { |
0f1961a2 | 2531 | gimple init_stmt = NULL; |
6de9cd9a | 2532 | tree var; |
f4088621 | 2533 | tree rhs = value; |
110cfe1c JH |
2534 | tree def = (gimple_in_ssa_p (cfun) |
2535 | ? gimple_default_def (id->src_cfun, p) : NULL); | |
6de9cd9a | 2536 | |
f4088621 RG |
2537 | if (value |
2538 | && value != error_mark_node | |
2539 | && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))) | |
c54e3854 RG |
2540 | { |
2541 | if (fold_convertible_p (TREE_TYPE (p), value)) | |
2542 | rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value); | |
2543 | else | |
2544 | /* ??? For valid (GIMPLE) programs we should not end up here. | |
2545 | Still if something has gone wrong and we end up with truly | |
2546 | mismatched types here, fall back to using a VIEW_CONVERT_EXPR | |
2547 | to not leak invalid GIMPLE to the following passes. */ | |
2548 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value); | |
2549 | } | |
f4088621 | 2550 | |
b5b8b0ac AO |
2551 | /* Make an equivalent VAR_DECL. Note that we must NOT remap the type |
2552 | here since the type of this decl must be visible to the calling | |
2553 | function. */ | |
2554 | var = copy_decl_to_var (p, id); | |
2555 | ||
2556 | /* We're actually using the newly-created var. */ | |
2557 | if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL) | |
2558 | { | |
2559 | get_var_ann (var); | |
2560 | add_referenced_var (var); | |
2561 | } | |
2562 | ||
2563 | /* Declare this new variable. */ | |
910ad8de | 2564 | DECL_CHAIN (var) = *vars; |
b5b8b0ac AO |
2565 | *vars = var; |
2566 | ||
2567 | /* Make gimplifier happy about this variable. */ | |
2568 | DECL_SEEN_IN_BIND_EXPR_P (var) = 1; | |
2569 | ||
110cfe1c | 2570 | /* If the parameter is never assigned to and has no SSA_NAMEs created, |
b5b8b0ac AO |
2571 | we would not need to create a new variable here at all, if it | |
2572 | weren't for debug info.  Still, we can just use the argument | |
2573 | value. */ | |
6de9cd9a DN |
2574 | if (TREE_READONLY (p) |
2575 | && !TREE_ADDRESSABLE (p) | |
110cfe1c JH |
2576 | && value && !TREE_SIDE_EFFECTS (value) |
2577 | && !def) | |
6de9cd9a | 2578 | { |
84936f6f RH |
2579 | /* We may produce non-gimple trees by adding NOPs or introduce | |
2580 | invalid sharing when the operand is not really constant. | |
2581 | It is not a big deal to prohibit constant propagation here, as | |
2582 | we will constant propagate in the DOM1 pass anyway. */ | |
2583 | if (is_gimple_min_invariant (value) | |
f4088621 RG |
2584 | && useless_type_conversion_p (TREE_TYPE (p), |
2585 | TREE_TYPE (value)) | |
04482133 AO |
2586 | /* We have to be very careful about ADDR_EXPR. Make sure |
2587 | the base variable isn't a local variable of the inlined | |
2588 | function, e.g., when doing recursive inlining, direct or | |
2589 | mutually-recursive or whatever, which is why we don't | |
2590 | just test whether fn == current_function_decl. */ | |
2591 | && ! self_inlining_addr_expr (value, fn)) | |
6de9cd9a | 2592 | { |
6de9cd9a | 2593 | insert_decl_map (id, p, value); |
b5b8b0ac AO |
2594 | insert_debug_decl_map (id, p, var); |
2595 | return insert_init_debug_bind (id, bb, var, value, NULL); | |
6de9cd9a DN |
2596 | } |
2597 | } | |
2598 | ||
6de9cd9a DN |
2599 | /* Register the VAR_DECL as the equivalent for the PARM_DECL; |
2600 | that way, when the PARM_DECL is encountered, it will be | |
2601 | automatically replaced by the VAR_DECL. */ | |
7c7d3047 | 2602 | insert_decl_map (id, p, var); |
6de9cd9a | 2603 | |
6de9cd9a DN |
2604 | /* Even if P was TREE_READONLY, the new VAR should not be. |
2605 | In the original code, we would have constructed a | |
2606 | temporary, and then the function body would have never | |
2607 | changed the value of P. However, now, we will be | |
2608 | constructing VAR directly. The constructor body may | |
2609 | change its value multiple times as it is being | |
2610 | constructed. Therefore, it must not be TREE_READONLY; | |
2611 | the back-end assumes that a TREE_READONLY variable is | |
2612 | assigned to only once. */ | |
2613 | if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p))) | |
2614 | TREE_READONLY (var) = 0; | |
2615 | ||
110cfe1c JH |
2616 | /* If there is no setup required and we are in SSA, take the easy route | |
2617 | replacing all SSA names representing the function parameter by the | |
2618 | SSA name passed to the function. | |
2619 | ||
2620 | We need to construct a map for the variable anyway, as it might be used | |
2621 | in different SSA names when the parameter is set in the function. | |
2622 | ||
8454d27e JH |
2623 | Do the replacement at -O0 for const arguments replaced by a constant. | |
2624 | This is important for builtin_constant_p and other constructs requiring | |
b5b8b0ac | 2625 | a constant argument to be visible in the inlined function body. */ |
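/* E.g. (hypothetical): when inlining g (5) where g's parameter p is
   const and never written, the default SSA name of p is mapped
   directly to 5; this is what keeps builtin_constant_p (p) true in
   the inlined body even at -O0.  */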
110cfe1c | 2626 | if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p) |
8454d27e JH |
2627 | && (optimize |
2628 | || (TREE_READONLY (p) | |
2629 | && is_gimple_min_invariant (rhs))) | |
110cfe1c | 2630 | && (TREE_CODE (rhs) == SSA_NAME |
9b718f81 JH |
2631 | || is_gimple_min_invariant (rhs)) |
2632 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)) | |
110cfe1c JH |
2633 | { |
2634 | insert_decl_map (id, def, rhs); | |
b5b8b0ac | 2635 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
110cfe1c JH |
2636 | } |
2637 | ||
f6f2da7d JH |
2638 | /* If the value of the argument is never used, don't bother initializing | |
2639 | it. */ | |
1cf5abb3 | 2640 | if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p)) |
f6f2da7d JH |
2641 | { |
2642 | gcc_assert (!value || !TREE_SIDE_EFFECTS (value)); | |
b5b8b0ac | 2643 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
f6f2da7d JH |
2644 | } |
2645 | ||
6de9cd9a DN |
2646 | /* Initialize this VAR_DECL from the equivalent argument. Convert |
2647 | the argument to the proper type in case it was promoted. */ | |
2648 | if (value) | |
2649 | { | |
6de9cd9a | 2650 | if (rhs == error_mark_node) |
110cfe1c | 2651 | { |
7c7d3047 | 2652 | insert_decl_map (id, p, var); |
b5b8b0ac | 2653 | return insert_init_debug_bind (id, bb, var, rhs, NULL); |
110cfe1c | 2654 | } |
afe08db5 | 2655 | |
73dab33b | 2656 | STRIP_USELESS_TYPE_CONVERSION (rhs); |
6de9cd9a | 2657 | |
726a989a | 2658 | /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we |
6de9cd9a | 2659 | keep our trees in gimple form. */ |
110cfe1c JH |
2660 | if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p)) |
2661 | { | |
2662 | def = remap_ssa_name (def, id); | |
726a989a | 2663 | init_stmt = gimple_build_assign (def, rhs); |
110cfe1c JH |
2664 | SSA_NAME_IS_DEFAULT_DEF (def) = 0; |
2665 | set_default_def (var, NULL); | |
2666 | } | |
2667 | else | |
726a989a | 2668 | init_stmt = gimple_build_assign (var, rhs); |
6de9cd9a | 2669 | |
0f1961a2 | 2670 | if (bb && init_stmt) |
b5b8b0ac | 2671 | insert_init_stmt (id, bb, init_stmt); |
6de9cd9a | 2672 | } |
0f1961a2 | 2673 | return init_stmt; |
6de9cd9a DN |
2674 | } |
2675 | ||
d4e4baa9 | 2676 | /* Generate code to initialize the parameters of the function at the |
726a989a | 2677 | top of the stack in ID from the GIMPLE_CALL STMT. */ |
d4e4baa9 | 2678 | |
e21aff8a | 2679 | static void |
726a989a | 2680 | initialize_inlined_parameters (copy_body_data *id, gimple stmt, |
e21aff8a | 2681 | tree fn, basic_block bb) |
d4e4baa9 | 2682 | { |
d4e4baa9 | 2683 | tree parms; |
726a989a | 2684 | size_t i; |
d4e4baa9 | 2685 | tree p; |
d436bff8 | 2686 | tree vars = NULL_TREE; |
726a989a | 2687 | tree static_chain = gimple_call_chain (stmt); |
d4e4baa9 AO |
2688 | |
2689 | /* Figure out what the parameters are. */ | |
18c6ada9 | 2690 | parms = DECL_ARGUMENTS (fn); |
d4e4baa9 | 2691 | |
d4e4baa9 AO |
2692 | /* Loop through the parameter declarations, replacing each with an |
2693 | equivalent VAR_DECL, appropriately initialized. */ | |
910ad8de | 2694 | for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++) |
726a989a RB |
2695 | { |
2696 | tree val; | |
2697 | val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL; | |
2698 | setup_one_parameter (id, p, val, fn, bb, &vars); | |
2699 | } | |
ea184343 RG |
2700 | /* After remapping parameters remap their types. This has to be done |
2701 | in a second loop over all parameters to appropriately remap | |
2702 | variable sized arrays when the size is specified in a | |
2703 | parameter following the array. */ | |
910ad8de | 2704 | for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++) |
ea184343 RG |
2705 | { |
2706 | tree *varp = (tree *) pointer_map_contains (id->decl_map, p); | |
2707 | if (varp | |
2708 | && TREE_CODE (*varp) == VAR_DECL) | |
2709 | { | |
72aa3dca | 2710 | tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p) |
ea184343 | 2711 | ? gimple_default_def (id->src_cfun, p) : NULL); |
72aa3dca RG |
2712 | tree var = *varp; |
2713 | TREE_TYPE (var) = remap_type (TREE_TYPE (var), id); | |
ea184343 RG |
2714 | /* Also remap the default definition if the parameter setup |
2715 | remapped it to the default definition of the parameter | |
2716 | replacement. */ | |
72aa3dca | 2717 | if (def) |
ea184343 RG |
2718 | { |
2719 | tree *defp = (tree *) pointer_map_contains (id->decl_map, def); | |
2720 | if (defp | |
2721 | && TREE_CODE (*defp) == SSA_NAME | |
72aa3dca RG |
2722 | && SSA_NAME_VAR (*defp) == var) |
2723 | TREE_TYPE (*defp) = TREE_TYPE (var); | |
ea184343 RG |
2724 | } |
2725 | } | |
2726 | } | |
4838c5ee | 2727 | |
6de9cd9a DN |
2728 | /* Initialize the static chain. */ |
2729 | p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl; | |
ea99e0be | 2730 | gcc_assert (fn != current_function_decl); |
6de9cd9a DN |
2731 | if (p) |
2732 | { | |
2733 | /* No static chain? Seems like a bug in tree-nested.c. */ | |
1e128c5f | 2734 | gcc_assert (static_chain); |
4838c5ee | 2735 | |
e21aff8a | 2736 | setup_one_parameter (id, p, static_chain, fn, bb, &vars); |
4838c5ee AO |
2737 | } |
2738 | ||
e21aff8a | 2739 | declare_inline_vars (id->block, vars); |
d4e4baa9 AO |
2740 | } |
2741 | ||
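/* Illustrative sketch (a hypothetical nested-function example, after
   lowering by tree-nested.c): for

     void outer (void)
     {
       int k = 0;
       void inner (int a) { k += a; }
       inner (1);
     }

   the GIMPLE_CALL to "inner" carries a static chain pointing at the
   FRAME object of "outer", so the code above runs setup_one_parameter
   once for the argument 1 bound to "a", and once more to bind the
   chain value from gimple_call_chain (stmt) to the callee's
   static_chain_decl.  */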
726a989a | 2742 | |
e21aff8a SB |
2743 | /* Declare a return variable to replace the RESULT_DECL for the |
2744 | function we are calling. The RESULT_DECL is mapped to a new | |
2745 | variable so that uses of the result in the copied body are | |
2746 | automatically replaced by it. | |
2747 | ||
110cfe1c JH |
2748 | RETURN_SLOT, if non-null, is the place where to store the result. It |
2749 | is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null, | |
726a989a | 2750 | was the LHS of the MODIFY_EXPR to which this call is the RHS. |
7740f00d | 2751 | |
0f900dfa JJ |
2752 | The return value is a (possibly null) value that holds the result |
2753 | as seen by the caller. */ | |
d4e4baa9 | 2754 | |
d436bff8 | 2755 | static tree |
6938f93f JH |
2756 | declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest, |
2757 | basic_block entry_bb) | |
d4e4baa9 | 2758 | { |
1b369fae RH |
2759 | tree callee = id->src_fn; |
2760 | tree caller = id->dst_fn; | |
7740f00d RH |
2761 | tree result = DECL_RESULT (callee); |
2762 | tree callee_type = TREE_TYPE (result); | |
ea2edf88 | 2763 | tree caller_type; |
7740f00d | 2764 | tree var, use; |
d4e4baa9 | 2765 | |
ea2edf88 RG |
2766 | /* Handle type-mismatches in the function declaration return type |
2767 | vs. the call expression. */ | |
2768 | if (modify_dest) | |
2769 | caller_type = TREE_TYPE (modify_dest); | |
2770 | else | |
2771 | caller_type = TREE_TYPE (TREE_TYPE (callee)); | |
2772 | ||
d4e4baa9 AO |
2773 | /* We don't need to do anything for functions that don't return |
2774 | anything. */ | |
7740f00d | 2775 | if (!result || VOID_TYPE_P (callee_type)) |
0f900dfa | 2776 | return NULL_TREE; |
d4e4baa9 | 2777 | |
cc77ae10 | 2778 | /* If there was a return slot, then the return value is the |
7740f00d | 2779 | dereferenced address of that object. */ |
110cfe1c | 2780 | if (return_slot) |
7740f00d | 2781 | { |
110cfe1c | 2782 | /* The front end shouldn't have used both return_slot and |
7740f00d | 2783 | a modify expression. */ |
1e128c5f | 2784 | gcc_assert (!modify_dest); |
cc77ae10 | 2785 | if (DECL_BY_REFERENCE (result)) |
110cfe1c JH |
2786 | { |
2787 | tree return_slot_addr = build_fold_addr_expr (return_slot); | |
2788 | STRIP_USELESS_TYPE_CONVERSION (return_slot_addr); | |
2789 | ||
2790 | /* We are going to construct *&return_slot and we can't do that | |
b8698a0f | 2791 | for variables believed to be not addressable. |
110cfe1c JH |
2792 | |
2793 | FIXME: This check can possibly trigger, because values returned | |
2794 | via the return slot optimization are not believed by alias | |
2795 | analysis to have their address taken. */ | |
2796 | gcc_assert (TREE_CODE (return_slot) != SSA_NAME); | |
110cfe1c JH |
2797 | var = return_slot_addr; |
2798 | } | |
cc77ae10 | 2799 | else |
110cfe1c JH |
2800 | { |
2801 | var = return_slot; | |
2802 | gcc_assert (TREE_CODE (var) != SSA_NAME); | |
b5ca517c | 2803 | TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result); |
110cfe1c | 2804 | } |
0890b981 AP |
2805 | if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
2806 | || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
2807 | && !DECL_GIMPLE_REG_P (result) | |
22918034 | 2808 | && DECL_P (var)) |
0890b981 | 2809 | DECL_GIMPLE_REG_P (var) = 0; |
7740f00d RH |
2810 | use = NULL; |
2811 | goto done; | |
2812 | } | |
2813 | ||
2814 | /* All types requiring non-trivial constructors should have been handled. */ | |
1e128c5f | 2815 | gcc_assert (!TREE_ADDRESSABLE (callee_type)); |
7740f00d RH |
2816 | |
2817 | /* Attempt to avoid creating a new temporary variable. */ | |
110cfe1c JH |
2818 | if (modify_dest |
2819 | && TREE_CODE (modify_dest) != SSA_NAME) | |
7740f00d RH |
2820 | { |
2821 | bool use_it = false; | |
2822 | ||
2823 | /* We can't use MODIFY_DEST if there's type promotion involved. */ | |
f4088621 | 2824 | if (!useless_type_conversion_p (callee_type, caller_type)) |
7740f00d RH |
2825 | use_it = false; |
2826 | ||
2827 | /* ??? If we're assigning to a variable sized type, then we must | |
2828 | reuse the destination variable, because we've no good way to | |
2829 | create variable sized temporaries at this point. */ | |
2830 | else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST) | |
2831 | use_it = true; | |
2832 | ||
2833 | /* If the callee cannot possibly modify MODIFY_DEST, then we can | |
2834 | reuse it as the result of the call directly. Don't do this if | |
2835 | it would promote MODIFY_DEST to addressable. */ | |
e2f9fe42 RH |
2836 | else if (TREE_ADDRESSABLE (result)) |
2837 | use_it = false; | |
2838 | else | |
2839 | { | |
2840 | tree base_m = get_base_address (modify_dest); | |
2841 | ||
2842 | /* If the base isn't a decl, then it's a pointer, and we don't | |
2843 | know where that's going to go. */ | |
2844 | if (!DECL_P (base_m)) | |
2845 | use_it = false; | |
2846 | else if (is_global_var (base_m)) | |
2847 | use_it = false; | |
0890b981 AP |
2848 | else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
2849 | || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
2850 | && !DECL_GIMPLE_REG_P (result) | |
2851 | && DECL_GIMPLE_REG_P (base_m)) | |
1d327c16 | 2852 | use_it = false; |
e2f9fe42 RH |
2853 | else if (!TREE_ADDRESSABLE (base_m)) |
2854 | use_it = true; | |
2855 | } | |
7740f00d RH |
2856 | |
2857 | if (use_it) | |
2858 | { | |
2859 | var = modify_dest; | |
2860 | use = NULL; | |
2861 | goto done; | |
2862 | } | |
2863 | } | |
2864 | ||
1e128c5f | 2865 | gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST); |
7740f00d | 2866 | |
c08cd4c1 | 2867 | var = copy_result_decl_to_var (result, id); |
110cfe1c JH |
2868 | if (gimple_in_ssa_p (cfun)) |
2869 | { | |
2870 | get_var_ann (var); | |
2871 | add_referenced_var (var); | |
2872 | } | |
e21aff8a | 2873 | |
7740f00d | 2874 | DECL_SEEN_IN_BIND_EXPR_P (var) = 1; |
c021f10b | 2875 | add_local_decl (DECL_STRUCT_FUNCTION (caller), var); |
7740f00d | 2876 | |
6de9cd9a | 2877 | /* Do not have the rest of GCC warn about this variable as it should |
471854f8 | 2878 | not be visible to the user. */ |
6de9cd9a | 2879 | TREE_NO_WARNING (var) = 1; |
d4e4baa9 | 2880 | |
c08cd4c1 JM |
2881 | declare_inline_vars (id->block, var); |
2882 | ||
7740f00d RH |
2883 | /* Build the use expr. If the return type of the function was |
2884 | promoted, convert it back to the expected type. */ | |
2885 | use = var; | |
f4088621 | 2886 | if (!useless_type_conversion_p (caller_type, TREE_TYPE (var))) |
7740f00d | 2887 | use = fold_convert (caller_type, var); |
b8698a0f | 2888 | |
73dab33b | 2889 | STRIP_USELESS_TYPE_CONVERSION (use); |
7740f00d | 2890 | |
c08cd4c1 | 2891 | if (DECL_BY_REFERENCE (result)) |
32848948 RG |
2892 | { |
2893 | TREE_ADDRESSABLE (var) = 1; | |
2894 | var = build_fold_addr_expr (var); | |
2895 | } | |
c08cd4c1 | 2896 | |
7740f00d | 2897 | done: |
d4e4baa9 AO |
2898 | /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that |
2899 | way, when the RESULT_DECL is encountered, it will be | |
6938f93f JH |
2900 | automatically replaced by the VAR_DECL. |
2901 | ||
2902 | When returning by reference, ensure that RESULT_DECL remaps to | |
2903 | gimple_val. */ | |
2904 | if (DECL_BY_REFERENCE (result) | |
2905 | && !is_gimple_val (var)) | |
2906 | { | |
2907 | tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr"); | |
72109b25 JH |
2908 | if (gimple_in_ssa_p (id->src_cfun)) |
2909 | { | |
2910 | get_var_ann (temp); | |
2911 | add_referenced_var (temp); | |
2912 | } | |
6938f93f | 2913 | insert_decl_map (id, result, temp); |
7e97ee1a JH |
2914 | /* When RESULT_DECL is in SSA form, we need to use its default_def |
2915 | SSA_NAME. */ | |
2916 | if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result)) | |
2917 | temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id); | |
6938f93f JH |
2918 | insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var)); |
2919 | } | |
2920 | else | |
2921 | insert_decl_map (id, result, var); | |
d4e4baa9 | 2922 | |
6de9cd9a DN |
2923 | /* Remember this so we can ignore it in remap_decls. */ |
2924 | id->retvar = var; | |
2925 | ||
0f900dfa | 2926 | return use; |
d4e4baa9 AO |
2927 | } |
2928 | ||
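/* Illustrative sketch (invented names): when inlining "a = foo ()"
   where foo returns a plain struct, MODIFY_DEST is "a".  If "a"
   passes the checks above it is reused directly as the return
   variable; otherwise a temporary built by copy_result_decl_to_var
   stands in for it, and the mapping installed at "done" makes the
   copied callee body read

     retval.7 = ...;   (from the callee's "return ...;" statements)

   after which expand_call_inline emits "a = retval.7;" in place of
   the call.  */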
27dbd3ac RH |
2929 | /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference |
2930 | to a local label. */ | |
4838c5ee | 2931 | |
27dbd3ac RH |
2932 | static tree |
2933 | has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp) | |
4838c5ee | 2934 | { |
27dbd3ac RH |
2935 | tree node = *nodep; |
2936 | tree fn = (tree) fnp; | |
726a989a | 2937 | |
27dbd3ac RH |
2938 | if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn) |
2939 | return node; | |
2940 | ||
2941 | if (TYPE_P (node)) | |
2942 | *walk_subtrees = 0; | |
2943 | ||
2944 | return NULL_TREE; | |
2945 | } | |
726a989a | 2946 | |
27dbd3ac RH |
2947 | /* Determine if the function can be copied. If so return NULL. If |
2948 | not, return a string describing the reason for failure. */ | |
2949 | ||
2950 | static const char * | |
2951 | copy_forbidden (struct function *fun, tree fndecl) | |
2952 | { | |
2953 | const char *reason = fun->cannot_be_copied_reason; | |
c021f10b NF |
2954 | tree decl; |
2955 | unsigned ix; | |
27dbd3ac RH |
2956 | |
2957 | /* Only examine the function once. */ | |
2958 | if (fun->cannot_be_copied_set) | |
2959 | return reason; | |
2960 | ||
2961 | /* We cannot copy a function that receives a non-local goto | |
2962 | because we cannot remap the destination label used in the | |
2963 | function that is performing the non-local goto. */ | |
2964 | /* ??? Actually, this should be possible, if we work at it. | |
2965 | No doubt there's just a handful of places that simply | |
2966 | assume it doesn't happen and don't substitute properly. */ | |
2967 | if (fun->has_nonlocal_label) | |
2968 | { | |
2969 | reason = G_("function %q+F can never be copied " | |
2970 | "because it receives a non-local goto"); | |
2971 | goto fail; | |
2972 | } | |
2973 | ||
c021f10b NF |
2974 | FOR_EACH_LOCAL_DECL (fun, ix, decl) |
2975 | if (TREE_CODE (decl) == VAR_DECL | |
2976 | && TREE_STATIC (decl) | |
2977 | && !DECL_EXTERNAL (decl) | |
2978 | && DECL_INITIAL (decl) | |
2979 | && walk_tree_without_duplicates (&DECL_INITIAL (decl), | |
2980 | has_label_address_in_static_1, | |
2981 | fndecl)) | |
2982 | { | |
2983 | reason = G_("function %q+F can never be copied because it saves " | |
2984 | "address of local label in a static variable"); | |
2985 | goto fail; | |
2986 | } | |
27dbd3ac RH |
2987 | |
2988 | fail: | |
2989 | fun->cannot_be_copied_reason = reason; | |
2990 | fun->cannot_be_copied_set = true; | |
2991 | return reason; | |
2992 | } | |
2993 | ||
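/* Illustrative example of the second rejection above (a sketch, not
   from the sources): this function can never be copied because a
   static initializer captures the addresses of local labels:

     int f (int i)
     {
       static void *tab[] = { &&l0, &&l1 };
       goto *tab[i & 1];
     l0: return 0;
     l1: return 1;
     }

   A duplicate of f would still jump through "tab", which keeps
   pointing at the labels of the original body.  */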
2994 | ||
2995 | static const char *inline_forbidden_reason; | |
2996 | ||
2997 | /* A callback for walk_gimple_seq to handle statements. Returns non-null | |
2998 | iff a function cannot be inlined. Also sets the reason why. */ | |
c986baf6 | 2999 | |
c986baf6 | 3000 | static tree |
726a989a RB |
3001 | inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p, |
3002 | struct walk_stmt_info *wip) | |
c986baf6 | 3003 | { |
726a989a | 3004 | tree fn = (tree) wip->info; |
f08545a8 | 3005 | tree t; |
726a989a | 3006 | gimple stmt = gsi_stmt (*gsi); |
c986baf6 | 3007 | |
726a989a | 3008 | switch (gimple_code (stmt)) |
f08545a8 | 3009 | { |
726a989a | 3010 | case GIMPLE_CALL: |
3197c4fd AS |
3011 | /* Refuse to inline an alloca call unless the user explicitly forced |
3012 | it, as this may drastically change the program's memory overhead | |
3013 | when the function using alloca is called in a loop. In the copy of | |
3014 | GCC shipped in SPEC2000, inlining into schedule_block caused it to | |
3015 | require 2GB of RAM instead of 256MB. */ | |
726a989a | 3016 | if (gimple_alloca_call_p (stmt) |
f08545a8 JH |
3017 | && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))) |
3018 | { | |
ddd2d57e | 3019 | inline_forbidden_reason |
dee15844 | 3020 | = G_("function %q+F can never be inlined because it uses " |
ddd2d57e | 3021 | "alloca (override using the always_inline attribute)"); |
726a989a RB |
3022 | *handled_ops_p = true; |
3023 | return fn; | |
f08545a8 | 3024 | } |
726a989a RB |
3025 | |
3026 | t = gimple_call_fndecl (stmt); | |
3027 | if (t == NULL_TREE) | |
f08545a8 | 3028 | break; |
84f5e1b1 | 3029 | |
f08545a8 JH |
3030 | /* We cannot inline functions that call setjmp. */ |
3031 | if (setjmp_call_p (t)) | |
3032 | { | |
ddd2d57e | 3033 | inline_forbidden_reason |
dee15844 | 3034 | = G_("function %q+F can never be inlined because it uses setjmp"); |
726a989a RB |
3035 | *handled_ops_p = true; |
3036 | return t; | |
f08545a8 JH |
3037 | } |
3038 | ||
6de9cd9a | 3039 | if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL) |
3197c4fd | 3040 | switch (DECL_FUNCTION_CODE (t)) |
f08545a8 | 3041 | { |
3197c4fd AS |
3042 | /* We cannot inline functions that take a variable number of |
3043 | arguments. */ | |
3044 | case BUILT_IN_VA_START: | |
3197c4fd AS |
3045 | case BUILT_IN_NEXT_ARG: |
3046 | case BUILT_IN_VA_END: | |
6de9cd9a | 3047 | inline_forbidden_reason |
dee15844 | 3048 | = G_("function %q+F can never be inlined because it " |
6de9cd9a | 3049 | "uses variable argument lists"); |
726a989a RB |
3050 | *handled_ops_p = true; |
3051 | return t; | |
6de9cd9a | 3052 | |
3197c4fd | 3053 | case BUILT_IN_LONGJMP: |
6de9cd9a DN |
3054 | /* We can't inline functions that call __builtin_longjmp at |
3055 | all. The non-local goto machinery really requires the | |
3056 | destination be in a different function. If we allow the | |
3057 | function calling __builtin_longjmp to be inlined into the | |
3058 | function calling __builtin_setjmp, Things will Go Awry. */ | |
3059 | inline_forbidden_reason | |
dee15844 | 3060 | = G_("function %q+F can never be inlined because " |
6de9cd9a | 3061 | "it uses setjmp-longjmp exception handling"); |
726a989a RB |
3062 | *handled_ops_p = true; |
3063 | return t; | |
6de9cd9a DN |
3064 | |
3065 | case BUILT_IN_NONLOCAL_GOTO: | |
3066 | /* Similarly. */ | |
3067 | inline_forbidden_reason | |
dee15844 | 3068 | = G_("function %q+F can never be inlined because " |
6de9cd9a | 3069 | "it uses non-local goto"); |
726a989a RB |
3070 | *handled_ops_p = true; |
3071 | return t; | |
f08545a8 | 3072 | |
4b284111 JJ |
3073 | case BUILT_IN_RETURN: |
3074 | case BUILT_IN_APPLY_ARGS: | |
3075 | /* If a __builtin_apply_args caller would be inlined, | |
3076 | it would be saving arguments of the function it has | |
3077 | been inlined into. Similarly __builtin_return would | |
3078 | return from the function into which it has been inlined. */ | |
3079 | inline_forbidden_reason | |
dee15844 | 3080 | = G_("function %q+F can never be inlined because " |
4b284111 | 3081 | "it uses __builtin_return or __builtin_apply_args"); |
726a989a RB |
3082 | *handled_ops_p = true; |
3083 | return t; | |
4b284111 | 3084 | |
3197c4fd AS |
3085 | default: |
3086 | break; | |
3087 | } | |
f08545a8 JH |
3088 | break; |
3089 | ||
726a989a RB |
3090 | case GIMPLE_GOTO: |
3091 | t = gimple_goto_dest (stmt); | |
f08545a8 JH |
3092 | |
3093 | /* We will not inline a function which uses computed goto. The | |
3094 | addresses of its local labels, which may be tucked into | |
3095 | global storage, are of course not constant across | |
3096 | instantiations, which causes unexpected behavior. */ | |
3097 | if (TREE_CODE (t) != LABEL_DECL) | |
3098 | { | |
ddd2d57e | 3099 | inline_forbidden_reason |
dee15844 | 3100 | = G_("function %q+F can never be inlined " |
ddd2d57e | 3101 | "because it contains a computed goto"); |
726a989a RB |
3102 | *handled_ops_p = true; |
3103 | return t; | |
f08545a8 | 3104 | } |
6de9cd9a | 3105 | break; |
f08545a8 | 3106 | |
f08545a8 JH |
3107 | default: |
3108 | break; | |
3109 | } | |
3110 | ||
726a989a | 3111 | *handled_ops_p = false; |
f08545a8 | 3112 | return NULL_TREE; |
84f5e1b1 RH |
3113 | } |
3114 | ||
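/* Illustrative example (a sketch) of the alloca rule above: a callee
   such as

     void logline (const char *s)
     {
       char *buf = __builtin_alloca (__builtin_strlen (s) + 1);
       __builtin_strcpy (buf, s);
       __builtin_puts (buf);
     }

   frees its alloca storage on every return; if it were inlined into a
   caller that invokes it in a loop, each iteration's allocation would
   instead accumulate in the caller's frame until the caller returns.  */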
726a989a RB |
3115 | /* Return true if FNDECL is a function that cannot be inlined into |
3116 | another one. */ | |
3117 | ||
3118 | static bool | |
f08545a8 | 3119 | inline_forbidden_p (tree fndecl) |
84f5e1b1 | 3120 | { |
2092ee7d | 3121 | struct function *fun = DECL_STRUCT_FUNCTION (fndecl); |
726a989a RB |
3122 | struct walk_stmt_info wi; |
3123 | struct pointer_set_t *visited_nodes; | |
3124 | basic_block bb; | |
3125 | bool forbidden_p = false; | |
3126 | ||
27dbd3ac RH |
3127 | /* First check for shared reasons not to copy the code. */ |
3128 | inline_forbidden_reason = copy_forbidden (fun, fndecl); | |
3129 | if (inline_forbidden_reason != NULL) | |
3130 | return true; | |
3131 | ||
3132 | /* Next, walk the statements of the function looking for | |
3133 | constructs we can't handle, or that are non-optimal for inlining. */ | |
726a989a RB |
3134 | visited_nodes = pointer_set_create (); |
3135 | memset (&wi, 0, sizeof (wi)); | |
3136 | wi.info = (void *) fndecl; | |
3137 | wi.pset = visited_nodes; | |
e21aff8a | 3138 | |
2092ee7d | 3139 | FOR_EACH_BB_FN (bb, fun) |
726a989a RB |
3140 | { |
3141 | gimple ret; | |
3142 | gimple_seq seq = bb_seq (bb); | |
27dbd3ac | 3143 | ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi); |
726a989a RB |
3144 | forbidden_p = (ret != NULL); |
3145 | if (forbidden_p) | |
27dbd3ac | 3146 | break; |
2092ee7d JJ |
3147 | } |
3148 | ||
726a989a | 3149 | pointer_set_destroy (visited_nodes); |
726a989a | 3150 | return forbidden_p; |
84f5e1b1 RH |
3151 | } |
3152 | ||
8f4f502f EB |
3153 | /* Return true if CALLEE cannot be inlined into CALLER. */ |
3154 | ||
3155 | static bool | |
3156 | inline_forbidden_into_p (tree caller, tree callee) | |
3157 | { | |
3158 | /* Don't inline if the functions have different EH personalities. */ | |
3159 | if (DECL_FUNCTION_PERSONALITY (caller) | |
3160 | && DECL_FUNCTION_PERSONALITY (callee) | |
3161 | && (DECL_FUNCTION_PERSONALITY (caller) | |
3162 | != DECL_FUNCTION_PERSONALITY (callee))) | |
3163 | return true; | |
3164 | ||
3165 | /* Don't inline if the callee can throw non-call exceptions but the | |
3166 | caller cannot. */ | |
3167 | if (DECL_STRUCT_FUNCTION (callee) | |
3168 | && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions | |
3169 | && !(DECL_STRUCT_FUNCTION (caller) | |
3170 | && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions)) | |
3171 | return true; | |
3172 | ||
3173 | return false; | |
3174 | } | |
3175 | ||
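/* Example (a sketch): the personality check above stops, say, an Ada
   callee using __gnat_personality_v0 from being inlined into a C++
   caller using __gxx_personality_v0, since a function can have only
   one EH personality routine.  The second check likewise blocks
   inlining a callee compiled with -fnon-call-exceptions into a caller
   that was not, as the caller would lack EH edges for trapping
   statements.  */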
b3c3af2f SB |
3176 | /* Returns nonzero if FN is a function that does not have any |
3177 | fundamental inline blocking properties. */ | |
d4e4baa9 | 3178 | |
27dbd3ac RH |
3179 | bool |
3180 | tree_inlinable_function_p (tree fn) | |
d4e4baa9 | 3181 | { |
b3c3af2f | 3182 | bool inlinable = true; |
18177c7e RG |
3183 | bool do_warning; |
3184 | tree always_inline; | |
d4e4baa9 AO |
3185 | |
3186 | /* If we've already decided this function shouldn't be inlined, | |
3187 | there's no need to check again. */ | |
3188 | if (DECL_UNINLINABLE (fn)) | |
b3c3af2f | 3189 | return false; |
d4e4baa9 | 3190 | |
18177c7e RG |
3191 | /* We only warn for functions declared `inline' by the user. */ |
3192 | do_warning = (warn_inline | |
18177c7e | 3193 | && DECL_DECLARED_INLINE_P (fn) |
0494626a | 3194 | && !DECL_NO_INLINE_WARNING_P (fn) |
18177c7e RG |
3195 | && !DECL_IN_SYSTEM_HEADER (fn)); |
3196 | ||
3197 | always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)); | |
3198 | ||
e90acd93 | 3199 | if (flag_no_inline |
18177c7e RG |
3200 | && always_inline == NULL) |
3201 | { | |
3202 | if (do_warning) | |
3203 | warning (OPT_Winline, "function %q+F can never be inlined because it " | |
3204 | "is suppressed using -fno-inline", fn); | |
3205 | inlinable = false; | |
3206 | } | |
3207 | ||
18177c7e RG |
3208 | else if (!function_attribute_inlinable_p (fn)) |
3209 | { | |
3210 | if (do_warning) | |
3211 | warning (OPT_Winline, "function %q+F can never be inlined because it " | |
3212 | "uses attributes conflicting with inlining", fn); | |
3213 | inlinable = false; | |
3214 | } | |
46c5ad27 | 3215 | |
f08545a8 | 3216 | else if (inline_forbidden_p (fn)) |
b3c3af2f SB |
3217 | { |
3218 | /* See if we should warn about uninlinable functions. Previously, | |
3219 | some of these warnings would be issued while trying to expand | |
3220 | the function inline, but that would cause multiple warnings | |
3221 | about functions that would for example call alloca. But since | |
3222 | this a property of the function, just one warning is enough. | |
3223 | As a bonus we can now give more details about the reason why a | |
18177c7e RG |
3224 | function is not inlinable. */ |
3225 | if (always_inline) | |
dee15844 | 3226 | sorry (inline_forbidden_reason, fn); |
2d327012 | 3227 | else if (do_warning) |
d2fcbf6f | 3228 | warning (OPT_Winline, inline_forbidden_reason, fn); |
b3c3af2f SB |
3229 | |
3230 | inlinable = false; | |
3231 | } | |
d4e4baa9 AO |
3232 | |
3233 | /* Squirrel away the result so that we don't have to check again. */ | |
b3c3af2f | 3234 | DECL_UNINLINABLE (fn) = !inlinable; |
d4e4baa9 | 3235 | |
b3c3af2f SB |
3236 | return inlinable; |
3237 | } | |
3238 | ||
e5c4f28a RG |
3239 | /* Estimate the cost of a memory move. Use machine dependent |
3240 | word size and take possible memcpy call into account. */ | |
3241 | ||
3242 | int | |
3243 | estimate_move_cost (tree type) | |
3244 | { | |
3245 | HOST_WIDE_INT size; | |
3246 | ||
078c3644 JH |
3247 | gcc_assert (!VOID_TYPE_P (type)); |
3248 | ||
e5c4f28a RG |
3249 | size = int_size_in_bytes (type); |
3250 | ||
e04ad03d | 3251 | if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size)) |
e5c4f28a RG |
3252 | /* Cost of a memcpy call, 3 arguments and the call. */ |
3253 | return 4; | |
3254 | else | |
3255 | return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES); | |
3256 | } | |
3257 | ||
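/* Worked example (the constants are hypothetical and target
   dependent): with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed) == 4,
   a 16-byte struct costs (16 + 8 - 1) / 8 == 2, while a 64-byte
   struct exceeds the 8 * 4 == 32 byte limit and is charged the flat
   memcpy cost of 4.  */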
726a989a | 3258 | /* Returns the cost of operation CODE, according to WEIGHTS. */ |
7f9bc51b | 3259 | |
726a989a | 3260 | static int |
02f0b13a JH |
3261 | estimate_operator_cost (enum tree_code code, eni_weights *weights, |
3262 | tree op1 ATTRIBUTE_UNUSED, tree op2) | |
6de9cd9a | 3263 | { |
726a989a | 3264 | switch (code) |
6de9cd9a | 3265 | { |
726a989a RB |
3266 | /* These are "free" conversions, or their presumed cost |
3267 | is folded into other operations. */ | |
61fcaeec | 3268 | case RANGE_EXPR: |
1a87cf0c | 3269 | CASE_CONVERT: |
726a989a RB |
3270 | case COMPLEX_EXPR: |
3271 | case PAREN_EXPR: | |
726a989a | 3272 | return 0; |
6de9cd9a | 3273 | |
e5c4f28a RG |
3274 | /* Assign cost of 1 to usual operations. |
3275 | ??? We may consider mapping RTL costs to this. */ | |
6de9cd9a | 3276 | case COND_EXPR: |
4151978d | 3277 | case VEC_COND_EXPR: |
6de9cd9a DN |
3278 | |
3279 | case PLUS_EXPR: | |
5be014d5 | 3280 | case POINTER_PLUS_EXPR: |
6de9cd9a DN |
3281 | case MINUS_EXPR: |
3282 | case MULT_EXPR: | |
3283 | ||
09e881c9 | 3284 | case ADDR_SPACE_CONVERT_EXPR: |
325217ed | 3285 | case FIXED_CONVERT_EXPR: |
6de9cd9a | 3286 | case FIX_TRUNC_EXPR: |
6de9cd9a DN |
3287 | |
3288 | case NEGATE_EXPR: | |
3289 | case FLOAT_EXPR: | |
3290 | case MIN_EXPR: | |
3291 | case MAX_EXPR: | |
3292 | case ABS_EXPR: | |
3293 | ||
3294 | case LSHIFT_EXPR: | |
3295 | case RSHIFT_EXPR: | |
3296 | case LROTATE_EXPR: | |
3297 | case RROTATE_EXPR: | |
a6b46ba2 DN |
3298 | case VEC_LSHIFT_EXPR: |
3299 | case VEC_RSHIFT_EXPR: | |
6de9cd9a DN |
3300 | |
3301 | case BIT_IOR_EXPR: | |
3302 | case BIT_XOR_EXPR: | |
3303 | case BIT_AND_EXPR: | |
3304 | case BIT_NOT_EXPR: | |
3305 | ||
3306 | case TRUTH_ANDIF_EXPR: | |
3307 | case TRUTH_ORIF_EXPR: | |
3308 | case TRUTH_AND_EXPR: | |
3309 | case TRUTH_OR_EXPR: | |
3310 | case TRUTH_XOR_EXPR: | |
3311 | case TRUTH_NOT_EXPR: | |
3312 | ||
3313 | case LT_EXPR: | |
3314 | case LE_EXPR: | |
3315 | case GT_EXPR: | |
3316 | case GE_EXPR: | |
3317 | case EQ_EXPR: | |
3318 | case NE_EXPR: | |
3319 | case ORDERED_EXPR: | |
3320 | case UNORDERED_EXPR: | |
3321 | ||
3322 | case UNLT_EXPR: | |
3323 | case UNLE_EXPR: | |
3324 | case UNGT_EXPR: | |
3325 | case UNGE_EXPR: | |
3326 | case UNEQ_EXPR: | |
d1a7edaf | 3327 | case LTGT_EXPR: |
6de9cd9a | 3328 | |
6de9cd9a DN |
3329 | case CONJ_EXPR: |
3330 | ||
3331 | case PREDECREMENT_EXPR: | |
3332 | case PREINCREMENT_EXPR: | |
3333 | case POSTDECREMENT_EXPR: | |
3334 | case POSTINCREMENT_EXPR: | |
3335 | ||
16630a2c DN |
3336 | case REALIGN_LOAD_EXPR: |
3337 | ||
61d3cdbb DN |
3338 | case REDUC_MAX_EXPR: |
3339 | case REDUC_MIN_EXPR: | |
3340 | case REDUC_PLUS_EXPR: | |
20f06221 | 3341 | case WIDEN_SUM_EXPR: |
726a989a RB |
3342 | case WIDEN_MULT_EXPR: |
3343 | case DOT_PROD_EXPR: | |
0354c0c7 BS |
3344 | case WIDEN_MULT_PLUS_EXPR: |
3345 | case WIDEN_MULT_MINUS_EXPR: | |
726a989a | 3346 | |
89d67cca DN |
3347 | case VEC_WIDEN_MULT_HI_EXPR: |
3348 | case VEC_WIDEN_MULT_LO_EXPR: | |
3349 | case VEC_UNPACK_HI_EXPR: | |
3350 | case VEC_UNPACK_LO_EXPR: | |
d9987fb4 UB |
3351 | case VEC_UNPACK_FLOAT_HI_EXPR: |
3352 | case VEC_UNPACK_FLOAT_LO_EXPR: | |
8115817b | 3353 | case VEC_PACK_TRUNC_EXPR: |
89d67cca | 3354 | case VEC_PACK_SAT_EXPR: |
d9987fb4 | 3355 | case VEC_PACK_FIX_TRUNC_EXPR: |
98b44b0e IR |
3356 | case VEC_EXTRACT_EVEN_EXPR: |
3357 | case VEC_EXTRACT_ODD_EXPR: | |
3358 | case VEC_INTERLEAVE_HIGH_EXPR: | |
3359 | case VEC_INTERLEAVE_LOW_EXPR: | |
3360 | ||
726a989a | 3361 | return 1; |
6de9cd9a | 3362 | |
1ea7e6ad | 3363 | /* A few special cases of expensive operations. This is useful |
6de9cd9a DN |
3364 | to avoid inlining functions that have too many of these. */ |
3365 | case TRUNC_DIV_EXPR: | |
3366 | case CEIL_DIV_EXPR: | |
3367 | case FLOOR_DIV_EXPR: | |
3368 | case ROUND_DIV_EXPR: | |
3369 | case EXACT_DIV_EXPR: | |
3370 | case TRUNC_MOD_EXPR: | |
3371 | case CEIL_MOD_EXPR: | |
3372 | case FLOOR_MOD_EXPR: | |
3373 | case ROUND_MOD_EXPR: | |
3374 | case RDIV_EXPR: | |
02f0b13a JH |
3375 | if (TREE_CODE (op2) != INTEGER_CST) |
3376 | return weights->div_mod_cost; | |
3377 | return 1; | |
726a989a RB |
3378 | |
3379 | default: | |
3380 | /* We expect a copy assignment with no operator. */ | |
3381 | gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS); | |
3382 | return 0; | |
3383 | } | |
3384 | } | |
3385 | ||
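/* Example of the weighting above: "x = a / b" with a non-constant
   divisor is charged weights->div_mod_cost (10 in the time weights
   set up by init_inline_once below, 1 in the size weights),
   "x = a / 8" costs 1 like any usual operation, and a plain
   conversion such as "x = (long) y" is free.  */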
3386 | ||
3387 | /* Estimate number of instructions that will be created by expanding | |
3388 | the statements in the statement sequence STMTS. | |
3389 | WEIGHTS contains weights attributed to various constructs. */ | |
3390 | ||
3391 | static | |
3392 | int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights) | |
3393 | { | |
3394 | int cost; | |
3395 | gimple_stmt_iterator gsi; | |
3396 | ||
3397 | cost = 0; | |
3398 | for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3399 | cost += estimate_num_insns (gsi_stmt (gsi), weights); | |
3400 | ||
3401 | return cost; | |
3402 | } | |
3403 | ||
3404 | ||
3405 | /* Estimate number of instructions that will be created by expanding STMT. | |
3406 | WEIGHTS contains weights attributed to various constructs. */ | |
3407 | ||
3408 | int | |
3409 | estimate_num_insns (gimple stmt, eni_weights *weights) | |
3410 | { | |
3411 | unsigned cost, i; | |
3412 | enum gimple_code code = gimple_code (stmt); | |
3413 | tree lhs; | |
02f0b13a | 3414 | tree rhs; |
726a989a RB |
3415 | |
3416 | switch (code) | |
3417 | { | |
3418 | case GIMPLE_ASSIGN: | |
3419 | /* Try to estimate the cost of assignments. We have two cases to |
3420 | deal with: | |
3421 | 1) Simple assignments to registers; | |
3422 | 2) Stores to things that must live in memory. This includes | |
3423 | "normal" stores to scalars, but also assignments of large | |
3424 | structures, or constructors of big arrays; | |
3425 | ||
3426 | Let us look at these two cases, assuming we have "a = b + C": |
3427 | <GIMPLE_ASSIGN <var_decl "a"> | |
3428 | <plus_expr <var_decl "b"> <constant C>> | |
3429 | If "a" is a GIMPLE register, the assignment to it is free on almost | |
3430 | any target, because "a" usually ends up in a real register. Hence | |
3431 | the only cost of this expression comes from the PLUS_EXPR, and we | |
3432 | can ignore the GIMPLE_ASSIGN. | |
3433 | If "a" is not a GIMPLE register, the assignment to "a" will most | |
3434 | likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost | |
3435 | of moving something into "a", which we compute using the function | |
3436 | estimate_move_cost. */ | |
3437 | lhs = gimple_assign_lhs (stmt); | |
02f0b13a JH |
3438 | rhs = gimple_assign_rhs1 (stmt); |
3439 | ||
726a989a RB |
3440 | if (is_gimple_reg (lhs)) |
3441 | cost = 0; | |
3442 | else | |
3443 | cost = estimate_move_cost (TREE_TYPE (lhs)); | |
3444 | ||
02f0b13a JH |
3445 | if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs)) |
3446 | cost += estimate_move_cost (TREE_TYPE (rhs)); | |
3447 | ||
3448 | cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights, | |
3449 | gimple_assign_rhs1 (stmt), | |
3450 | get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) | |
3451 | == GIMPLE_BINARY_RHS | |
3452 | ? gimple_assign_rhs2 (stmt) : NULL); | |
726a989a RB |
3453 | break; |
3454 | ||
3455 | case GIMPLE_COND: | |
02f0b13a JH |
3456 | cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights, |
3457 | gimple_op (stmt, 0), | |
3458 | gimple_op (stmt, 1)); | |
726a989a RB |
3459 | break; |
3460 | ||
3461 | case GIMPLE_SWITCH: | |
3462 | /* Take into account the cost of the switch + guess 2 conditional jumps for |
b8698a0f | 3463 | each case label. |
726a989a RB |
3464 | |
3465 | TODO: once the switch expansion logic is sufficiently separated, we can | |
3466 | do a better job of estimating the cost of the switch. */ | |
02f0b13a JH |
3467 | if (weights->time_based) |
3468 | cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2; | |
3469 | else | |
3470 | cost = gimple_switch_num_labels (stmt) * 2; | |
6de9cd9a | 3471 | break; |
726a989a RB |
3472 | |
3473 | case GIMPLE_CALL: | |
6de9cd9a | 3474 | { |
726a989a RB |
3475 | tree decl = gimple_call_fndecl (stmt); |
3476 | tree addr = gimple_call_fn (stmt); | |
8723e2fe | 3477 | tree funtype = TREE_TYPE (addr); |
613f61fc | 3478 | bool stdarg = false; |
8723e2fe | 3479 | |
726a989a RB |
3480 | if (POINTER_TYPE_P (funtype)) |
3481 | funtype = TREE_TYPE (funtype); | |
6de9cd9a | 3482 | |
bec922f0 SL |
3483 | if (is_simple_builtin (decl)) |
3484 | return 0; | |
3485 | else if (is_inexpensive_builtin (decl)) | |
726a989a | 3486 | cost = weights->target_builtin_call_cost; |
625a2efb | 3487 | else |
726a989a | 3488 | cost = weights->call_cost; |
b8698a0f | 3489 | |
8723e2fe JH |
3490 | if (decl) |
3491 | funtype = TREE_TYPE (decl); | |
3492 | ||
02f0b13a JH |
3493 | if (!VOID_TYPE_P (TREE_TYPE (funtype))) |
3494 | cost += estimate_move_cost (TREE_TYPE (funtype)); | |
613f61fc JH |
3495 | |
3496 | if (funtype) | |
3497 | stdarg = stdarg_p (funtype); | |
3498 | ||
726a989a RB |
3499 | /* Our cost must be kept in sync with |
3500 | cgraph_estimate_size_after_inlining, which uses the function |
613f61fc JH |
3501 | declaration to figure out the arguments. |
3502 | ||
3503 | For functions taking a variable list of arguments we must |
3504 | look into the call statement itself. This is safe because |
3505 | we will only get higher costs, and in most cases we will |
3506 | not inline these anyway. */ | |
3507 | if (decl && DECL_ARGUMENTS (decl) && !stdarg) | |
8723e2fe JH |
3508 | { |
3509 | tree arg; | |
910ad8de | 3510 | for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg)) |
078c3644 JH |
3511 | if (!VOID_TYPE_P (TREE_TYPE (arg))) |
3512 | cost += estimate_move_cost (TREE_TYPE (arg)); | |
8723e2fe | 3513 | } |
613f61fc | 3514 | else if (funtype && prototype_p (funtype) && !stdarg) |
8723e2fe JH |
3515 | { |
3516 | tree t; | |
078c3644 JH |
3517 | for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node; |
3518 | t = TREE_CHAIN (t)) | |
3519 | if (!VOID_TYPE_P (TREE_VALUE (t))) | |
3520 | cost += estimate_move_cost (TREE_VALUE (t)); | |
8723e2fe JH |
3521 | } |
3522 | else | |
c7f599d0 | 3523 | { |
726a989a RB |
3524 | for (i = 0; i < gimple_call_num_args (stmt); i++) |
3525 | { | |
3526 | tree arg = gimple_call_arg (stmt, i); | |
078c3644 JH |
3527 | if (!VOID_TYPE_P (TREE_TYPE (arg))) |
3528 | cost += estimate_move_cost (TREE_TYPE (arg)); | |
726a989a | 3529 | } |
c7f599d0 | 3530 | } |
e5c4f28a | 3531 | |
6de9cd9a DN |
3532 | break; |
3533 | } | |
88f4034b | 3534 | |
726a989a RB |
3535 | case GIMPLE_GOTO: |
3536 | case GIMPLE_LABEL: | |
3537 | case GIMPLE_NOP: | |
3538 | case GIMPLE_PHI: | |
3539 | case GIMPLE_RETURN: | |
726a989a | 3540 | case GIMPLE_PREDICT: |
b5b8b0ac | 3541 | case GIMPLE_DEBUG: |
726a989a RB |
3542 | return 0; |
3543 | ||
3544 | case GIMPLE_ASM: | |
2bd1d2c8 | 3545 | return asm_str_count (gimple_asm_string (stmt)); |
726a989a | 3546 | |
1d65f45c RH |
3547 | case GIMPLE_RESX: |
3548 | /* This is either going to be an external function call with one | |
3549 | argument, or two register copy statements plus a goto. */ | |
3550 | return 2; | |
3551 | ||
3552 | case GIMPLE_EH_DISPATCH: | |
3553 | /* ??? This is going to turn into a switch statement. Ideally | |
3554 | we'd have a look at the eh region and estimate the number of | |
3555 | edges involved. */ | |
3556 | return 10; | |
3557 | ||
726a989a RB |
3558 | case GIMPLE_BIND: |
3559 | return estimate_num_insns_seq (gimple_bind_body (stmt), weights); | |
3560 | ||
3561 | case GIMPLE_EH_FILTER: | |
3562 | return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights); | |
3563 | ||
3564 | case GIMPLE_CATCH: | |
3565 | return estimate_num_insns_seq (gimple_catch_handler (stmt), weights); | |
3566 | ||
3567 | case GIMPLE_TRY: | |
3568 | return (estimate_num_insns_seq (gimple_try_eval (stmt), weights) | |
3569 | + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights)); | |
3570 | ||
3571 | /* OpenMP directives are generally very expensive. */ | |
3572 | ||
3573 | case GIMPLE_OMP_RETURN: | |
3574 | case GIMPLE_OMP_SECTIONS_SWITCH: | |
3575 | case GIMPLE_OMP_ATOMIC_STORE: | |
3576 | case GIMPLE_OMP_CONTINUE: | |
3577 | /* ...except these, which are cheap. */ | |
3578 | return 0; | |
3579 | ||
3580 | case GIMPLE_OMP_ATOMIC_LOAD: | |
3581 | return weights->omp_cost; | |
3582 | ||
3583 | case GIMPLE_OMP_FOR: | |
3584 | return (weights->omp_cost | |
3585 | + estimate_num_insns_seq (gimple_omp_body (stmt), weights) | |
3586 | + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights)); | |
3587 | ||
3588 | case GIMPLE_OMP_PARALLEL: | |
3589 | case GIMPLE_OMP_TASK: | |
3590 | case GIMPLE_OMP_CRITICAL: | |
3591 | case GIMPLE_OMP_MASTER: | |
3592 | case GIMPLE_OMP_ORDERED: | |
3593 | case GIMPLE_OMP_SECTION: | |
3594 | case GIMPLE_OMP_SECTIONS: | |
3595 | case GIMPLE_OMP_SINGLE: | |
3596 | return (weights->omp_cost | |
3597 | + estimate_num_insns_seq (gimple_omp_body (stmt), weights)); | |
88f4034b | 3598 | |
6de9cd9a | 3599 | default: |
1e128c5f | 3600 | gcc_unreachable (); |
6de9cd9a | 3601 | } |
726a989a RB |
3602 | |
3603 | return cost; | |
6de9cd9a DN |
3604 | } |
3605 | ||
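/* Worked example (a sketch, using the size weights and assuming a
   word-sized int so the move cost is 1): for the GIMPLE

     a_1 = b_2 + c_3;    register LHS, PLUS_EXPR        -> 0 + 1
     *p_4 = a_1;         store, move cost of an int     -> 1
     if (a_1 > 0) ...    GIMPLE_COND, 1 + comparison    -> 2
     return a_1;         GIMPLE_RETURN                  -> 0

   for an estimate of 4 instructions in total.  */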
726a989a RB |
3606 | /* Estimate number of instructions that will be created by expanding |
3607 | function FNDECL. WEIGHTS contains weights attributed to various | |
3608 | constructs. */ | |
aa4a53af | 3609 | |
6de9cd9a | 3610 | int |
726a989a | 3611 | estimate_num_insns_fn (tree fndecl, eni_weights *weights) |
6de9cd9a | 3612 | { |
726a989a RB |
3613 | struct function *my_function = DECL_STRUCT_FUNCTION (fndecl); |
3614 | gimple_stmt_iterator bsi; | |
e21aff8a | 3615 | basic_block bb; |
726a989a | 3616 | int n = 0; |
e21aff8a | 3617 | |
726a989a RB |
3618 | gcc_assert (my_function && my_function->cfg); |
3619 | FOR_EACH_BB_FN (bb, my_function) | |
e21aff8a | 3620 | { |
726a989a RB |
3621 | for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
3622 | n += estimate_num_insns (gsi_stmt (bsi), weights); | |
e21aff8a | 3623 | } |
e21aff8a | 3624 | |
726a989a | 3625 | return n; |
7f9bc51b ZD |
3626 | } |
3627 | ||
726a989a | 3628 | |
7f9bc51b ZD |
3629 | /* Initializes weights used by estimate_num_insns. */ |
3630 | ||
3631 | void | |
3632 | init_inline_once (void) | |
3633 | { | |
7f9bc51b | 3634 | eni_size_weights.call_cost = 1; |
625a2efb | 3635 | eni_size_weights.target_builtin_call_cost = 1; |
7f9bc51b | 3636 | eni_size_weights.div_mod_cost = 1; |
7f9bc51b | 3637 | eni_size_weights.omp_cost = 40; |
02f0b13a | 3638 | eni_size_weights.time_based = false; |
7f9bc51b ZD |
3639 | |
3640 | /* Estimating the time for a call is difficult, since we have no idea what the |
3641 | called function does. In the current uses of eni_time_weights, | |
3642 | underestimating the cost does less harm than overestimating it, so | |
ea2c620c | 3643 | we choose a rather small value here. */ |
7f9bc51b | 3644 | eni_time_weights.call_cost = 10; |
625a2efb | 3645 | eni_time_weights.target_builtin_call_cost = 10; |
7f9bc51b | 3646 | eni_time_weights.div_mod_cost = 10; |
7f9bc51b | 3647 | eni_time_weights.omp_cost = 40; |
02f0b13a | 3648 | eni_time_weights.time_based = true; |
6de9cd9a DN |
3649 | } |
3650 | ||
726a989a RB |
3651 | /* Estimate the number of instructions in a gimple_seq. */ |
3652 | ||
3653 | int | |
3654 | count_insns_seq (gimple_seq seq, eni_weights *weights) | |
3655 | { | |
3656 | gimple_stmt_iterator gsi; | |
3657 | int n = 0; | |
3658 | for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3659 | n += estimate_num_insns (gsi_stmt (gsi), weights); | |
3660 | ||
3661 | return n; | |
3662 | } | |
3663 | ||
3664 | ||
e21aff8a | 3665 | /* Install new lexical TREE_BLOCK underneath 'current_block'. */ |
726a989a | 3666 | |
e21aff8a | 3667 | static void |
4a283090 | 3668 | prepend_lexical_block (tree current_block, tree new_block) |
e21aff8a | 3669 | { |
4a283090 JH |
3670 | BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block); |
3671 | BLOCK_SUBBLOCKS (current_block) = new_block; | |
e21aff8a | 3672 | BLOCK_SUPERCONTEXT (new_block) = current_block; |
e21aff8a SB |
3673 | } |
3674 | ||
c021f10b NF |
3675 | /* Add local variables from CALLEE to CALLER. */ |
3676 | ||
3677 | static inline void | |
3678 | add_local_variables (struct function *callee, struct function *caller, | |
3679 | copy_body_data *id, bool check_var_ann) | |
3680 | { | |
3681 | tree var; | |
3682 | unsigned ix; | |
3683 | ||
3684 | FOR_EACH_LOCAL_DECL (callee, ix, var) | |
3685 | if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var)) | |
3686 | { | |
3687 | if (!check_var_ann | |
3688 | || (var_ann (var) && add_referenced_var (var))) | |
3689 | add_local_decl (caller, var); | |
3690 | } | |
3691 | else if (!can_be_nonlocal (var, id)) | |
42694189 JJ |
3692 | { |
3693 | tree new_var = remap_decl (var, id); | |
3694 | ||
3695 | /* Remap debug-expressions. */ | |
3696 | if (TREE_CODE (new_var) == VAR_DECL | |
3697 | && DECL_DEBUG_EXPR_IS_FROM (new_var) | |
3698 | && new_var != var) | |
3699 | { | |
3700 | tree tem = DECL_DEBUG_EXPR (var); | |
3701 | bool old_regimplify = id->regimplify; | |
3702 | id->remapping_type_depth++; | |
3703 | walk_tree (&tem, copy_tree_body_r, id, NULL); | |
3704 | id->remapping_type_depth--; | |
3705 | id->regimplify = old_regimplify; | |
3706 | SET_DECL_DEBUG_EXPR (new_var, tem); | |
3707 | } | |
3708 | add_local_decl (caller, new_var); | |
3709 | } | |
c021f10b NF |
3710 | } |
3711 | ||
726a989a | 3712 | /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */ |
d4e4baa9 | 3713 | |
e21aff8a | 3714 | static bool |
726a989a | 3715 | expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id) |
d4e4baa9 | 3716 | { |
0f900dfa | 3717 | tree use_retvar; |
d436bff8 | 3718 | tree fn; |
b5b8b0ac | 3719 | struct pointer_map_t *st, *dst; |
110cfe1c | 3720 | tree return_slot; |
7740f00d | 3721 | tree modify_dest; |
6de9cd9a | 3722 | location_t saved_location; |
e21aff8a | 3723 | struct cgraph_edge *cg_edge; |
61a05df1 | 3724 | cgraph_inline_failed_t reason; |
e21aff8a SB |
3725 | basic_block return_block; |
3726 | edge e; | |
726a989a | 3727 | gimple_stmt_iterator gsi, stmt_gsi; |
e21aff8a | 3728 | bool successfully_inlined = FALSE; |
4f6c2131 | 3729 | bool purge_dead_abnormal_edges; |
d4e4baa9 | 3730 | |
6de9cd9a DN |
3731 | /* Set input_location here so we get the right instantiation context |
3732 | if we call instantiate_decl from inlinable_function_p. */ | |
3733 | saved_location = input_location; | |
726a989a RB |
3734 | if (gimple_has_location (stmt)) |
3735 | input_location = gimple_location (stmt); | |
6de9cd9a | 3736 | |
d4e4baa9 | 3737 | /* From here on, we're only interested in CALL_EXPRs. */ |
726a989a | 3738 | if (gimple_code (stmt) != GIMPLE_CALL) |
6de9cd9a | 3739 | goto egress; |
d4e4baa9 AO |
3740 | |
3741 | /* First, see if we can figure out what function is being called. | |
3742 | If we cannot, then there is no hope of inlining the function. */ | |
726a989a | 3743 | fn = gimple_call_fndecl (stmt); |
d4e4baa9 | 3744 | if (!fn) |
3949c4a7 | 3745 | goto egress; |
d4e4baa9 | 3746 | |
b58b1157 | 3747 | /* Turn forward declarations into real ones. */ |
d4d1ebc1 | 3748 | fn = cgraph_node (fn)->decl; |
b58b1157 | 3749 | |
726a989a | 3750 | /* If FN is a declaration of a function in a nested scope that was |
a1a0fd4e AO |
3751 | globally declared inline, we don't set its DECL_INITIAL. |
3752 | However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the | |
3753 | C++ front-end uses it for cdtors to refer to their internal | |
3754 | declarations, which are not real functions. Fortunately those | |
3755 | don't have trees to be saved, so we can tell by checking their | |
726a989a RB |
3756 | gimple_body. */ |
3757 | if (!DECL_INITIAL (fn) | |
a1a0fd4e | 3758 | && DECL_ABSTRACT_ORIGIN (fn) |
39ecc018 | 3759 | && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn))) |
a1a0fd4e AO |
3760 | fn = DECL_ABSTRACT_ORIGIN (fn); |
3761 | ||
18c6ada9 JH |
3762 | /* Objective-C and Fortran still call tree_rest_of_compilation directly. |
3763 | Kill this check once this is fixed. */ | |
1b369fae | 3764 | if (!id->dst_node->analyzed) |
6de9cd9a | 3765 | goto egress; |
18c6ada9 | 3766 | |
1b369fae | 3767 | cg_edge = cgraph_edge (id->dst_node, stmt); |
18c6ada9 | 3768 | |
8f4f502f EB |
3769 | /* First check that inlining isn't simply forbidden in this case. */ |
3770 | if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl)) | |
f9417da1 RG |
3771 | goto egress; |
3772 | ||
8f4f502f | 3773 | /* Don't try to inline functions that are not well-suited to inlining. */ |
e21aff8a | 3774 | if (!cgraph_inline_p (cg_edge, &reason)) |
a833faa5 | 3775 | { |
3e293154 MJ |
3776 | /* If this call was originally indirect, we do not want to emit any |
3777 | inlining related warnings or sorry messages because there are no | |
3778 | guarantees regarding those. */ | |
e33c6cd6 | 3779 | if (cg_edge->indirect_inlining_edge) |
3e293154 MJ |
3780 | goto egress; |
3781 | ||
7fac66d4 JH |
3782 | if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)) |
3783 | /* Avoid warnings during early inline pass. */ | |
7e8b322a | 3784 | && cgraph_global_info_ready) |
2d327012 | 3785 | { |
61a05df1 | 3786 | sorry ("inlining failed in call to %q+F: %s", fn, |
49c8958b | 3787 | _(cgraph_inline_failed_string (reason))); |
2d327012 JH |
3788 | sorry ("called from here"); |
3789 | } | |
3790 | else if (warn_inline && DECL_DECLARED_INLINE_P (fn) | |
3791 | && !DECL_IN_SYSTEM_HEADER (fn) | |
61a05df1 | 3792 | && reason != CIF_UNSPECIFIED |
d63db217 JH |
3793 | && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)) |
3794 | /* Avoid warnings during early inline pass. */ | |
7e8b322a | 3795 | && cgraph_global_info_ready) |
a833faa5 | 3796 | { |
dee15844 | 3797 | warning (OPT_Winline, "inlining failed in call to %q+F: %s", |
49c8958b | 3798 | fn, _(cgraph_inline_failed_string (reason))); |
3176a0c2 | 3799 | warning (OPT_Winline, "called from here"); |
a833faa5 | 3800 | } |
6de9cd9a | 3801 | goto egress; |
a833faa5 | 3802 | } |
ea99e0be | 3803 | fn = cg_edge->callee->decl; |
d4e4baa9 | 3804 | |
18c6ada9 | 3805 | #ifdef ENABLE_CHECKING |
1b369fae | 3806 | if (cg_edge->callee->decl != id->dst_node->decl) |
e21aff8a | 3807 | verify_cgraph_node (cg_edge->callee); |
18c6ada9 JH |
3808 | #endif |
3809 | ||
e21aff8a | 3810 | /* We will be inlining this callee. */ |
1d65f45c | 3811 | id->eh_lp_nr = lookup_stmt_eh_lp (stmt); |
e21aff8a | 3812 | |
f9417da1 RG |
3813 | /* Update the callers EH personality. */ |
3814 | if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)) | |
3815 | DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl) | |
3816 | = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl); | |
3817 | ||
726a989a | 3818 | /* Split the block holding the GIMPLE_CALL. */ |
e21aff8a SB |
3819 | e = split_block (bb, stmt); |
3820 | bb = e->src; | |
3821 | return_block = e->dest; | |
3822 | remove_edge (e); | |
3823 | ||
4f6c2131 EB |
3824 | /* split_block splits after the statement; work around this by |
3825 | moving the call into the second block manually. Not pretty, | |
3826 | but seems easier than doing the CFG manipulation by hand | |
726a989a RB |
3827 | when the GIMPLE_CALL is in the last statement of BB. */ |
3828 | stmt_gsi = gsi_last_bb (bb); | |
3829 | gsi_remove (&stmt_gsi, false); | |
4f6c2131 | 3830 | |
726a989a | 3831 | /* If the GIMPLE_CALL was in the last statement of BB, it may have |
4f6c2131 EB |
3832 | been the source of abnormal edges. In this case, schedule |
3833 | the removal of dead abnormal edges. */ | |
726a989a RB |
3834 | gsi = gsi_start_bb (return_block); |
3835 | if (gsi_end_p (gsi)) | |
e21aff8a | 3836 | { |
726a989a | 3837 | gsi_insert_after (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 | 3838 | purge_dead_abnormal_edges = true; |
e21aff8a | 3839 | } |
4f6c2131 EB |
3840 | else |
3841 | { | |
726a989a | 3842 | gsi_insert_before (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 EB |
3843 | purge_dead_abnormal_edges = false; |
3844 | } | |
3845 | ||
726a989a | 3846 | stmt_gsi = gsi_start_bb (return_block); |
742a37d5 | 3847 | |
d436bff8 AH |
3848 | /* Build a block containing code to initialize the arguments, the |
3849 | actual inline expansion of the body, and a label for the return | |
3850 | statements within the function to jump to. The type of the | |
3851 | statement expression is the return type of the function call. */ | |
e21aff8a SB |
3852 | id->block = make_node (BLOCK); |
3853 | BLOCK_ABSTRACT_ORIGIN (id->block) = fn; | |
3e2844cb | 3854 | BLOCK_SOURCE_LOCATION (id->block) = input_location; |
4a283090 | 3855 | prepend_lexical_block (gimple_block (stmt), id->block); |
e21aff8a | 3856 | |
d4e4baa9 AO |
3857 | /* Local declarations will be replaced by their equivalents in this |
3858 | map. */ | |
3859 | st = id->decl_map; | |
6be42dd4 | 3860 | id->decl_map = pointer_map_create (); |
b5b8b0ac AO |
3861 | dst = id->debug_map; |
3862 | id->debug_map = NULL; | |
d4e4baa9 | 3863 | |
e21aff8a | 3864 | /* Record the function we are about to inline. */ |
1b369fae RH |
3865 | id->src_fn = fn; |
3866 | id->src_node = cg_edge->callee; | |
110cfe1c | 3867 | id->src_cfun = DECL_STRUCT_FUNCTION (fn); |
726a989a | 3868 | id->gimple_call = stmt; |
1b369fae | 3869 | |
3c8da8a5 AO |
3870 | gcc_assert (!id->src_cfun->after_inlining); |
3871 | ||
045685a9 | 3872 | id->entry_bb = bb; |
7299cb99 JH |
3873 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn))) |
3874 | { | |
3875 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
3876 | gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION, | |
3877 | NOT_TAKEN), | |
3878 | GSI_NEW_STMT); | |
3879 | } | |
726a989a | 3880 | initialize_inlined_parameters (id, stmt, fn, bb); |
d4e4baa9 | 3881 | |
ea99e0be | 3882 | if (DECL_INITIAL (fn)) |
4a283090 | 3883 | prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id)); |
acb8f212 | 3884 | |
d4e4baa9 AO |
3885 | /* Return statements in the function body will be replaced by jumps |
3886 | to the RET_LABEL. */ | |
1e128c5f GB |
3887 | gcc_assert (DECL_INITIAL (fn)); |
3888 | gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK); | |
23700f65 | 3889 | |
726a989a | 3890 | /* Find the LHS to which the result of this call is assigned. */ |
110cfe1c | 3891 | return_slot = NULL; |
726a989a | 3892 | if (gimple_call_lhs (stmt)) |
81bafd36 | 3893 | { |
726a989a | 3894 | modify_dest = gimple_call_lhs (stmt); |
81bafd36 ILT |
3895 | |
3896 | /* The function which we are inlining might not return a value, | |
3897 | in which case we should issue a warning that the function | |
3898 | does not return a value. In that case the optimizers will | |
3899 | see that the variable to which the value is assigned was not | |
3900 | initialized. We do not want to issue a warning about that | |
3901 | uninitialized variable. */ | |
3902 | if (DECL_P (modify_dest)) | |
3903 | TREE_NO_WARNING (modify_dest) = 1; | |
726a989a RB |
3904 | |
3905 | if (gimple_call_return_slot_opt_p (stmt)) | |
fa47911c | 3906 | { |
110cfe1c | 3907 | return_slot = modify_dest; |
fa47911c JM |
3908 | modify_dest = NULL; |
3909 | } | |
81bafd36 | 3910 | } |
7740f00d RH |
3911 | else |
3912 | modify_dest = NULL; | |
3913 | ||
1ea193c2 ILT |
3914 | /* If we are inlining a call to the C++ operator new, we don't want |
3915 | to use type based alias analysis on the return value. Otherwise | |
3916 | we may get confused if the compiler sees that the inlined new | |
3917 | function returns a pointer which was just deleted. See bug | |
3918 | 33407. */ | |
3919 | if (DECL_IS_OPERATOR_NEW (fn)) | |
3920 | { | |
3921 | return_slot = NULL; | |
3922 | modify_dest = NULL; | |
3923 | } | |
3924 | ||
d4e4baa9 | 3925 | /* Declare the return variable for the function. */ |
6938f93f | 3926 | use_retvar = declare_return_variable (id, return_slot, modify_dest, bb); |
1ea193c2 | 3927 | |
acb8f212 | 3928 | /* Add local vars in this inlined callee to caller. */ |
c021f10b | 3929 | add_local_variables (id->src_cfun, cfun, id, true); |
acb8f212 | 3930 | |
0d63a740 JH |
3931 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3932 | { | |
3933 | fprintf (dump_file, "Inlining "); | |
b8698a0f | 3934 | print_generic_expr (dump_file, id->src_fn, 0); |
0d63a740 | 3935 | fprintf (dump_file, " to "); |
b8698a0f | 3936 | print_generic_expr (dump_file, id->dst_fn, 0); |
0d63a740 JH |
3937 | fprintf (dump_file, " with frequency %i\n", cg_edge->frequency); |
3938 | } | |
3939 | ||
eb50f5f4 JH |
3940 | /* This is it. Duplicate the callee body. Assume callee is |
3941 | pre-gimplified. Note that we must not alter the caller | |
3942 | function in any way before this point, as this CALL_EXPR may be | |
3943 | a self-referential call; if we're calling ourselves, we need to | |
3944 | duplicate our body before altering anything. */ | |
0d63a740 JH |
3945 | copy_body (id, bb->count, |
3946 | cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE, | |
91382288 | 3947 | bb, return_block, NULL, NULL); |
eb50f5f4 | 3948 | |
d086d311 | 3949 | /* Reset the escaped solution. */ |
6b8ed145 | 3950 | if (cfun->gimple_df) |
d086d311 | 3951 | pt_solution_reset (&cfun->gimple_df->escaped); |
6b8ed145 | 3952 | |
d4e4baa9 | 3953 | /* Clean up. */ |
b5b8b0ac AO |
3954 | if (id->debug_map) |
3955 | { | |
3956 | pointer_map_destroy (id->debug_map); | |
3957 | id->debug_map = dst; | |
3958 | } | |
6be42dd4 | 3959 | pointer_map_destroy (id->decl_map); |
d4e4baa9 AO |
3960 | id->decl_map = st; |
3961 | ||
5006671f RG |
3962 | /* Unlink the call's virtual operands before replacing it. */ |
3963 | unlink_stmt_vdef (stmt); | |
3964 | ||
84936f6f | 3965 | /* If the inlined function returns a result that we care about, |
726a989a RB |
3966 | substitute the GIMPLE_CALL with an assignment of the return |
3967 | variable to the LHS of the call. That is, if STMT was | |
3968 | 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */ | |
3969 | if (use_retvar && gimple_call_lhs (stmt)) | |
e21aff8a | 3970 | { |
726a989a RB |
3971 | gimple old_stmt = stmt; |
3972 | stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar); | |
3973 | gsi_replace (&stmt_gsi, stmt, false); | |
110cfe1c | 3974 | if (gimple_in_ssa_p (cfun)) |
5006671f | 3975 | mark_symbols_for_renaming (stmt); |
726a989a | 3976 | maybe_clean_or_replace_eh_stmt (old_stmt, stmt); |
e21aff8a | 3977 | } |
6de9cd9a | 3978 | else |
110cfe1c | 3979 | { |
726a989a RB |
3980 | /* Handle the case of inlining a function with no return |
3981 | statement, which causes the return value to become undefined. */ | |
3982 | if (gimple_call_lhs (stmt) | |
3983 | && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME) | |
110cfe1c | 3984 | { |
726a989a RB |
3985 | tree name = gimple_call_lhs (stmt); |
3986 | tree var = SSA_NAME_VAR (name); | |
110cfe1c JH |
3987 | tree def = gimple_default_def (cfun, var); |
3988 | ||
110cfe1c JH |
3989 | if (def) |
3990 | { | |
726a989a RB |
3991 | /* If the variable is used undefined, make this name |
3992 | undefined via a move. */ | |
3993 | stmt = gimple_build_assign (gimple_call_lhs (stmt), def); | |
3994 | gsi_replace (&stmt_gsi, stmt, true); | |
110cfe1c | 3995 | } |
110cfe1c JH |
3996 | else |
3997 | { | |
726a989a RB |
3998 | /* Otherwise make this variable undefined. */ |
3999 | gsi_remove (&stmt_gsi, true); | |
110cfe1c | 4000 | set_default_def (var, name); |
726a989a | 4001 | SSA_NAME_DEF_STMT (name) = gimple_build_nop (); |
110cfe1c JH |
4002 | } |
4003 | } | |
4004 | else | |
726a989a | 4005 | gsi_remove (&stmt_gsi, true); |
110cfe1c | 4006 | } |
d4e4baa9 | 4007 | |
4f6c2131 | 4008 | if (purge_dead_abnormal_edges) |
726a989a | 4009 | gimple_purge_dead_abnormal_call_edges (return_block); |
84936f6f | 4010 | |
e21aff8a SB |
4011 | /* If the value of the new expression is ignored, that's OK. We |
4012 | don't warn about this for CALL_EXPRs, so we shouldn't warn about | |
4013 | the equivalent inlined version either. */ | |
726a989a RB |
4014 | if (is_gimple_assign (stmt)) |
4015 | { | |
4016 | gcc_assert (gimple_assign_single_p (stmt) | |
1a87cf0c | 4017 | || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))); |
726a989a RB |
4018 | TREE_USED (gimple_assign_rhs1 (stmt)) = 1; |
4019 | } | |
84936f6f | 4020 | |
1eb3331e DB |
4021 | /* Output the inlining info for this abstract function, since it has been |
4022 | inlined. If we don't do this now, we can lose the information about the | |
4023 | variables in the function when the blocks get blown away as soon as we | |
4024 | remove the cgraph node. */ | |
e21aff8a | 4025 | (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl); |
84936f6f | 4026 | |
e72fcfe8 | 4027 | /* Update callgraph if needed. */ |
e21aff8a | 4028 | cgraph_remove_node (cg_edge->callee); |
e72fcfe8 | 4029 | |
e21aff8a | 4030 | id->block = NULL_TREE; |
e21aff8a | 4031 | successfully_inlined = TRUE; |
742a37d5 | 4032 | |
6de9cd9a DN |
4033 | egress: |
4034 | input_location = saved_location; | |
e21aff8a | 4035 | return successfully_inlined; |
d4e4baa9 | 4036 | } |
6de9cd9a | 4037 | |
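/* Illustrative sketch of the whole transformation (reusing the
   invented names from the earlier examples): for a block containing

     x = add1 (y);

   the block is split at the call, the callee body is copied between
   the two halves with its parameters set up as above, every "return"
   becomes an assignment to the return variable, and the call
   statement itself is finally replaced by

     x = retval.7;  */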
e21aff8a SB |
4038 | /* Expand call statements reachable from basic block BB. |
4039 | We can only have CALL_EXPRs as the "toplevel" tree code or nested | |
12947319 | 4040 | in a MODIFY_EXPR. See gimple.c:get_call_expr_in(). Unfortunately, |
e21aff8a SB |
4041 | we cannot use that function here because we need a pointer |
4042 | to the CALL_EXPR, not the tree itself. */ | |
4043 | ||
4044 | static bool | |
1b369fae | 4045 | gimple_expand_calls_inline (basic_block bb, copy_body_data *id) |
6de9cd9a | 4046 | { |
726a989a | 4047 | gimple_stmt_iterator gsi; |
6de9cd9a | 4048 | |
726a989a | 4049 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
6de9cd9a | 4050 | { |
726a989a | 4051 | gimple stmt = gsi_stmt (gsi); |
e21aff8a | 4052 | |
726a989a RB |
4053 | if (is_gimple_call (stmt) |
4054 | && expand_call_inline (bb, stmt, id)) | |
4055 | return true; | |
6de9cd9a | 4056 | } |
726a989a | 4057 | |
e21aff8a | 4058 | return false; |
6de9cd9a DN |
4059 | } |
4060 | ||
726a989a | 4061 | |
b8a00a4d JH |
4062 | /* Walk all basic blocks created after FIRST and try to fold every statement |
4063 | in the STATEMENTS pointer set. */ | |
726a989a | 4064 | |
b8a00a4d JH |
4065 | static void |
4066 | fold_marked_statements (int first, struct pointer_set_t *statements) | |
4067 | { | |
726a989a | 4068 | for (; first < n_basic_blocks; first++) |
b8a00a4d JH |
4069 | if (BASIC_BLOCK (first)) |
4070 | { | |
726a989a RB |
4071 | gimple_stmt_iterator gsi; |
4072 | ||
4073 | for (gsi = gsi_start_bb (BASIC_BLOCK (first)); | |
4074 | !gsi_end_p (gsi); | |
4075 | gsi_next (&gsi)) | |
4076 | if (pointer_set_contains (statements, gsi_stmt (gsi))) | |
9477eb38 | 4077 | { |
726a989a | 4078 | gimple old_stmt = gsi_stmt (gsi); |
4b685e14 | 4079 | tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0; |
2bafad93 | 4080 | |
44e10129 MM |
4081 | if (old_decl && DECL_BUILT_IN (old_decl)) |
4082 | { | |
4083 | /* Folding builtins can create multiple statements;
4084 | we need to look at all of them. */
4085 | gimple_stmt_iterator i2 = gsi; | |
4086 | gsi_prev (&i2); | |
4087 | if (fold_stmt (&gsi)) | |
4088 | { | |
4089 | gimple new_stmt; | |
4090 | if (gsi_end_p (i2)) | |
4091 | i2 = gsi_start_bb (BASIC_BLOCK (first)); | |
4092 | else | |
4093 | gsi_next (&i2); | |
4094 | while (1) | |
4095 | { | |
4096 | new_stmt = gsi_stmt (i2); | |
4097 | update_stmt (new_stmt); | |
4098 | cgraph_update_edges_for_call_stmt (old_stmt, old_decl, | |
4099 | new_stmt); | |
4100 | ||
4101 | if (new_stmt == gsi_stmt (gsi)) | |
4102 | { | |
4103 | /* It is okay to check only for the very last
4104 | of these statements. If it is a throwing
4105 | statement nothing will change. If it isn't
4106 | this can remove EH edges. The only way this
4107 | could be wrong is if some intermediate
4108 | statements could throw while the last one
4109 | does not; that would require splitting the
4110 | block, which we cannot do here, and we would
4111 | lose anyway. And as builtins probably never
4112 | throw, this is all moot anyway. */
4113 | if (maybe_clean_or_replace_eh_stmt (old_stmt, | |
4114 | new_stmt)) | |
4115 | gimple_purge_dead_eh_edges (BASIC_BLOCK (first)); | |
4116 | break; | |
4117 | } | |
4118 | gsi_next (&i2); | |
4119 | } | |
4120 | } | |
4121 | } | |
4122 | else if (fold_stmt (&gsi)) | |
9477eb38 | 4123 | { |
726a989a RB |
4124 | /* Re-read the statement from GSI as fold_stmt() may |
4125 | have changed it. */ | |
4126 | gimple new_stmt = gsi_stmt (gsi); | |
4127 | update_stmt (new_stmt); | |
4128 | ||
4b685e14 JH |
4129 | if (is_gimple_call (old_stmt) |
4130 | || is_gimple_call (new_stmt)) | |
44e10129 MM |
4131 | cgraph_update_edges_for_call_stmt (old_stmt, old_decl, |
4132 | new_stmt); | |
726a989a RB |
4133 | |
4134 | if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)) | |
4135 | gimple_purge_dead_eh_edges (BASIC_BLOCK (first)); | |
9477eb38 JH |
4136 | } |
4137 | } | |
b8a00a4d JH |
4138 | } |
4139 | } | |
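/* A hypothetical example of why the inner loop above is needed: folding
     tmp_1 = __builtin_memcpy (dst_2, src_3, 4);
   may emit several new statements rather than a single replacement, so
   every statement between I2 and GSI must get update_stmt and callgraph
   maintenance, not just the one at GSI.  */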
4140 | ||
1084e689 JH |
4141 | /* Return true if BB has at least one abnormal outgoing edge. */ |
4142 | ||
4143 | static inline bool | |
4144 | has_abnormal_outgoing_edge_p (basic_block bb) | |
4145 | { | |
4146 | edge e; | |
4147 | edge_iterator ei; | |
4148 | ||
4149 | FOR_EACH_EDGE (e, ei, bb->succs) | |
4150 | if (e->flags & EDGE_ABNORMAL) | |
4151 | return true; | |
4152 | ||
4153 | return false; | |
4154 | } | |
4155 | ||
d4e4baa9 AO |
4156 | /* Expand calls to inline functions in the body of FN. */ |
4157 | ||
873aa8f5 | 4158 | unsigned int |
46c5ad27 | 4159 | optimize_inline_calls (tree fn) |
d4e4baa9 | 4160 | { |
1b369fae | 4161 | copy_body_data id; |
e21aff8a | 4162 | basic_block bb; |
b8a00a4d | 4163 | int last = n_basic_blocks; |
d406b663 | 4164 | struct gimplify_ctx gctx; |
5d7b099c | 4165 | bool inlined_p = false; |
d406b663 | 4166 | |
c5b6f18e MM |
4167 | /* There is no point in performing inlining if errors have already |
4168 | occurred -- and we might crash if we try to inline invalid | |
4169 | code. */ | |
1da2ed5f | 4170 | if (seen_error ()) |
873aa8f5 | 4171 | return 0; |
c5b6f18e | 4172 | |
d4e4baa9 AO |
4173 | /* Clear out ID. */ |
4174 | memset (&id, 0, sizeof (id)); | |
4175 | ||
1b369fae RH |
4176 | id.src_node = id.dst_node = cgraph_node (fn); |
4177 | id.dst_fn = fn; | |
d4e4baa9 | 4178 | /* Or any functions that aren't finished yet. */ |
d4e4baa9 | 4179 | if (current_function_decl) |
0f900dfa | 4180 | id.dst_fn = current_function_decl; |
1b369fae RH |
4181 | |
4182 | id.copy_decl = copy_decl_maybe_to_var; | |
4183 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4184 | id.transform_new_cfg = false; | |
4185 | id.transform_return_to_modify = true; | |
9ff420f1 | 4186 | id.transform_lang_insert_block = NULL; |
b8a00a4d | 4187 | id.statements_to_fold = pointer_set_create (); |
1b369fae | 4188 | |
d406b663 | 4189 | push_gimplify_context (&gctx); |
d4e4baa9 | 4190 | |
672987e8 ZD |
4191 | /* We make no attempts to keep dominance info up-to-date. */ |
4192 | free_dominance_info (CDI_DOMINATORS); | |
4193 | free_dominance_info (CDI_POST_DOMINATORS); | |
4194 | ||
726a989a RB |
4195 | /* Register specific gimple functions. */ |
4196 | gimple_register_cfg_hooks (); | |
4197 | ||
e21aff8a SB |
4198 | /* Reach the trees by walking over the CFG, and note the |
4199 | enclosing basic-blocks in the call edges. */ | |
4200 | /* We walk the blocks going forward, because inlined function bodies | |
4201 | will split id->current_basic_block, and the new blocks will | |
4202 | follow it; we'll trudge through them, processing their CALL_EXPRs | |
4203 | along the way. */ | |
4204 | FOR_EACH_BB (bb) | |
5d7b099c | 4205 | inlined_p |= gimple_expand_calls_inline (bb, &id); |
d4e4baa9 | 4206 | |
e21aff8a | 4207 | pop_gimplify_context (NULL); |
6de9cd9a | 4208 | |
18c6ada9 JH |
4209 | #ifdef ENABLE_CHECKING |
4210 | { | |
4211 | struct cgraph_edge *e; | |
4212 | ||
1b369fae | 4213 | verify_cgraph_node (id.dst_node); |
18c6ada9 JH |
4214 | |
4215 | /* Double check that we inlined everything we are supposed to inline. */ | |
1b369fae | 4216 | for (e = id.dst_node->callees; e; e = e->next_callee) |
1e128c5f | 4217 | gcc_assert (e->inline_failed); |
18c6ada9 JH |
4218 | } |
4219 | #endif | |
b8698a0f | 4220 | |
5d7b099c | 4221 | /* Fold queued statements. */ |
a9eafe81 AP |
4222 | fold_marked_statements (last, id.statements_to_fold); |
4223 | pointer_set_destroy (id.statements_to_fold); | |
b8698a0f | 4224 | |
b5b8b0ac AO |
4225 | gcc_assert (!id.debug_stmts); |
4226 | ||
5d7b099c RG |
4227 | /* If we didn't inline into the function there is nothing to do. */ |
4228 | if (!inlined_p) | |
4229 | return 0; | |
4230 | ||
a9eafe81 AP |
4231 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ |
4232 | number_blocks (fn); | |
b8a00a4d | 4233 | |
078c3644 JH |
4234 | delete_unreachable_blocks_update_callgraph (&id); |
4235 | #ifdef ENABLE_CHECKING | |
4236 | verify_cgraph_node (id.dst_node); | |
4237 | #endif | |
726a989a | 4238 | |
110cfe1c JH |
4239 | /* It would be nice to check SSA/CFG/statement consistency here, but it is
4240 | not possible yet - the IPA passes might make various functions not
4241 | throw, and they don't care to proactively update local EH info. This is
4242 | done later in the fixup_cfg pass, which also executes the verification. */
726a989a RB |
4243 | return (TODO_update_ssa |
4244 | | TODO_cleanup_cfg | |
45a80bb9 | 4245 | | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0) |
5d7b099c | 4246 | | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0) |
45a80bb9 | 4247 | | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0)); |
d4e4baa9 AO |
4248 | } |
4249 | ||
d4e4baa9 AO |
4250 | /* Passed to walk_tree. Copies the node pointed to, if appropriate. */ |
4251 | ||
4252 | tree | |
46c5ad27 | 4253 | copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
d4e4baa9 AO |
4254 | { |
4255 | enum tree_code code = TREE_CODE (*tp); | |
07beea0d | 4256 | enum tree_code_class cl = TREE_CODE_CLASS (code); |
d4e4baa9 AO |
4257 | |
4258 | /* We make copies of most nodes. */ | |
07beea0d | 4259 | if (IS_EXPR_CODE_CLASS (cl) |
d4e4baa9 AO |
4260 | || code == TREE_LIST |
4261 | || code == TREE_VEC | |
8843c120 DN |
4262 | || code == TYPE_DECL |
4263 | || code == OMP_CLAUSE) | |
d4e4baa9 AO |
4264 | { |
4265 | /* Because the chain gets clobbered when we make a copy, we save it | |
4266 | here. */ | |
82d6e6fc | 4267 | tree chain = NULL_TREE, new_tree; |
07beea0d | 4268 | |
726a989a | 4269 | chain = TREE_CHAIN (*tp); |
d4e4baa9 AO |
4270 | |
4271 | /* Copy the node. */ | |
82d6e6fc | 4272 | new_tree = copy_node (*tp); |
6de9cd9a DN |
4273 | |
4274 | /* Propagate mudflap marked-ness. */ | |
4275 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 4276 | mf_mark (new_tree); |
6de9cd9a | 4277 | |
82d6e6fc | 4278 | *tp = new_tree; |
d4e4baa9 AO |
4279 | |
4280 | /* Now, restore the chain, if appropriate. That will cause | |
4281 | walk_tree to walk into the chain as well. */ | |
50674e96 DN |
4282 | if (code == PARM_DECL |
4283 | || code == TREE_LIST | |
aaf46ef9 | 4284 | || code == OMP_CLAUSE) |
d4e4baa9 AO |
4285 | TREE_CHAIN (*tp) = chain; |
4286 | ||
4287 | /* For now, we don't update BLOCKs when we make copies. So, we | |
6de9cd9a DN |
4288 | have to nullify all BIND_EXPRs. */ |
4289 | if (TREE_CODE (*tp) == BIND_EXPR) | |
4290 | BIND_EXPR_BLOCK (*tp) = NULL_TREE; | |
d4e4baa9 | 4291 | } |
4038c495 GB |
4292 | else if (code == CONSTRUCTOR) |
4293 | { | |
4294 | /* CONSTRUCTOR nodes need special handling because | |
4295 | we need to duplicate the vector of elements. */ | |
82d6e6fc | 4296 | tree new_tree; |
4038c495 | 4297 | |
82d6e6fc | 4298 | new_tree = copy_node (*tp); |
4038c495 GB |
4299 | |
4300 | /* Propagate mudflap marked-ness. */ | |
4301 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 4302 | mf_mark (new_tree); |
9f63daea | 4303 | |
82d6e6fc | 4304 | CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc, |
4038c495 | 4305 | CONSTRUCTOR_ELTS (*tp)); |
82d6e6fc | 4306 | *tp = new_tree; |
4038c495 | 4307 | } |
6615c446 | 4308 | else if (TREE_CODE_CLASS (code) == tcc_type) |
d4e4baa9 | 4309 | *walk_subtrees = 0; |
6615c446 | 4310 | else if (TREE_CODE_CLASS (code) == tcc_declaration) |
6de9cd9a | 4311 | *walk_subtrees = 0; |
a396f8ae GK |
4312 | else if (TREE_CODE_CLASS (code) == tcc_constant) |
4313 | *walk_subtrees = 0; | |
1e128c5f GB |
4314 | else |
4315 | gcc_assert (code != STATEMENT_LIST); | |
d4e4baa9 AO |
4316 | return NULL_TREE; |
4317 | } | |
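/* To illustrate: copying "a + b" through walk_tree with copy_tree_r
   yields a fresh PLUS_EXPR node, while its operands still point at the
   original VAR_DECLs, since declarations, types and constants are
   deliberately shared (*walk_subtrees is cleared for them above).  */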
4318 | ||
4319 | /* The SAVE_EXPR pointed to by TP is being copied. If ST contains | |
aa4a53af | 4320 | information indicating to what new SAVE_EXPR this one should be mapped, |
e21aff8a SB |
4321 | use that one. Otherwise, create a new node and enter it in ST. FN is |
4322 | the function into which the copy will be placed. */ | |
d4e4baa9 | 4323 | |
892c7e1e | 4324 | static void |
82c82743 | 4325 | remap_save_expr (tree *tp, void *st_, int *walk_subtrees) |
d4e4baa9 | 4326 | { |
6be42dd4 RG |
4327 | struct pointer_map_t *st = (struct pointer_map_t *) st_; |
4328 | tree *n; | |
5e20bdd7 | 4329 | tree t; |
d4e4baa9 AO |
4330 | |
4331 | /* See if we already encountered this SAVE_EXPR. */ | |
6be42dd4 | 4332 | n = (tree *) pointer_map_contains (st, *tp); |
d92b4486 | 4333 | |
d4e4baa9 AO |
4334 | /* If we didn't already remap this SAVE_EXPR, do so now. */ |
4335 | if (!n) | |
4336 | { | |
5e20bdd7 | 4337 | t = copy_node (*tp); |
d4e4baa9 | 4338 | |
d4e4baa9 | 4339 | /* Remember this SAVE_EXPR. */ |
6be42dd4 | 4340 | *pointer_map_insert (st, *tp) = t; |
350ebd54 | 4341 | /* Make sure we don't remap an already-remapped SAVE_EXPR. */ |
6be42dd4 | 4342 | *pointer_map_insert (st, t) = t; |
d4e4baa9 AO |
4343 | } |
4344 | else | |
5e20bdd7 JZ |
4345 | { |
4346 | /* We've already walked into this SAVE_EXPR; don't do it again. */ | |
4347 | *walk_subtrees = 0; | |
6be42dd4 | 4348 | t = *n; |
5e20bdd7 | 4349 | } |
d4e4baa9 AO |
4350 | |
4351 | /* Replace this SAVE_EXPR with the copy. */ | |
5e20bdd7 | 4352 | *tp = t; |
d4e4baa9 | 4353 | } |
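/* The map above guarantees that a SAVE_EXPR referenced several times in
   the walked tree is rewritten to a single new node, preserving its
   evaluate-once semantics; the extra T -> T entry keeps a later walk
   from remapping the copy again.  */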
d436bff8 | 4354 | |
aa4a53af RK |
4355 | /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4356 | copies the declaration and enters it in the decl map in DATA (which is
1b369fae | 4357 | really a `copy_body_data *'). */
6de9cd9a DN |
4358 | |
4359 | static tree | |
4360 | mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
4361 | void *data) | |
4362 | { | |
1b369fae | 4363 | copy_body_data *id = (copy_body_data *) data; |
6de9cd9a DN |
4364 | |
4365 | /* Don't walk into types. */ | |
350fae66 RK |
4366 | if (TYPE_P (*tp)) |
4367 | *walk_subtrees = 0; | |
6de9cd9a | 4368 | |
350fae66 | 4369 | else if (TREE_CODE (*tp) == LABEL_EXPR) |
6de9cd9a | 4370 | { |
350fae66 | 4371 | tree decl = TREE_OPERAND (*tp, 0); |
6de9cd9a | 4372 | |
350fae66 | 4373 | /* Copy the decl and remember the copy. */ |
1b369fae | 4374 | insert_decl_map (id, decl, id->copy_decl (decl, id)); |
6de9cd9a DN |
4375 | } |
4376 | ||
4377 | return NULL_TREE; | |
4378 | } | |
4379 | ||
19114537 EC |
4380 | /* Perform any modifications to EXPR required when it is unsaved. Does |
4381 | not recurse into EXPR's subtrees. */ | |
4382 | ||
4383 | static void | |
4384 | unsave_expr_1 (tree expr) | |
4385 | { | |
4386 | switch (TREE_CODE (expr)) | |
4387 | { | |
4388 | case TARGET_EXPR: | |
4389 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
4390 | It's OK for this to happen if it was part of a subtree that | |
4391 | isn't immediately expanded, such as operand 2 of another | |
4392 | TARGET_EXPR. */ | |
4393 | if (TREE_OPERAND (expr, 1)) | |
4394 | break; | |
4395 | ||
4396 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
4397 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
4398 | break; | |
4399 | ||
4400 | default: | |
4401 | break; | |
4402 | } | |
4403 | } | |
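/* In effect, for a TARGET_EXPR that has already been expanded this moves
   the saved initializer in operand 3 back into operand 1, so that a later
   expansion of the unsaved tree re-evaluates the initializer.  */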
4404 | ||
6de9cd9a DN |
4405 | /* Called via walk_tree when an expression is unsaved. Using the
4406 | pointer map pointed to by ST (the decl map of the `copy_body_data'
4407 | in DATA), remaps all local declarations to appropriate replacements. */
d436bff8 AH |
4408 | |
4409 | static tree | |
6de9cd9a | 4410 | unsave_r (tree *tp, int *walk_subtrees, void *data) |
d436bff8 | 4411 | { |
1b369fae | 4412 | copy_body_data *id = (copy_body_data *) data; |
6be42dd4 RG |
4413 | struct pointer_map_t *st = id->decl_map; |
4414 | tree *n; | |
6de9cd9a DN |
4415 | |
4416 | /* Only a local declaration (variable or label). */ | |
4417 | if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp)) | |
4418 | || TREE_CODE (*tp) == LABEL_DECL) | |
4419 | { | |
4420 | /* Lookup the declaration. */ | |
6be42dd4 | 4421 | n = (tree *) pointer_map_contains (st, *tp); |
9f63daea | 4422 | |
6de9cd9a DN |
4423 | /* If it's there, remap it. */ |
4424 | if (n) | |
6be42dd4 | 4425 | *tp = *n; |
6de9cd9a | 4426 | } |
aa4a53af | 4427 | |
6de9cd9a | 4428 | else if (TREE_CODE (*tp) == STATEMENT_LIST) |
726a989a | 4429 | gcc_unreachable (); |
6de9cd9a DN |
4430 | else if (TREE_CODE (*tp) == BIND_EXPR) |
4431 | copy_bind_expr (tp, walk_subtrees, id); | |
a406865a RG |
4432 | else if (TREE_CODE (*tp) == SAVE_EXPR |
4433 | || TREE_CODE (*tp) == TARGET_EXPR) | |
82c82743 | 4434 | remap_save_expr (tp, st, walk_subtrees); |
d436bff8 | 4435 | else |
6de9cd9a DN |
4436 | { |
4437 | copy_tree_r (tp, walk_subtrees, NULL); | |
4438 | ||
4439 | /* Do whatever unsaving is required. */ | |
4440 | unsave_expr_1 (*tp); | |
4441 | } | |
4442 | ||
4443 | /* Keep iterating. */ | |
4444 | return NULL_TREE; | |
d436bff8 AH |
4445 | } |
4446 | ||
19114537 EC |
4447 | /* Copies everything in EXPR and replaces variables, labels |
4448 | and SAVE_EXPRs local to EXPR. */ | |
6de9cd9a DN |
4449 | |
4450 | tree | |
19114537 | 4451 | unsave_expr_now (tree expr) |
6de9cd9a | 4452 | { |
1b369fae | 4453 | copy_body_data id; |
6de9cd9a DN |
4454 | |
4455 | /* There's nothing to do for NULL_TREE. */ | |
4456 | if (expr == 0) | |
4457 | return expr; | |
4458 | ||
4459 | /* Set up ID. */ | |
4460 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
4461 | id.src_fn = current_function_decl; |
4462 | id.dst_fn = current_function_decl; | |
6be42dd4 | 4463 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 4464 | id.debug_map = NULL; |
6de9cd9a | 4465 | |
1b369fae RH |
4466 | id.copy_decl = copy_decl_no_change; |
4467 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4468 | id.transform_new_cfg = false; | |
4469 | id.transform_return_to_modify = false; | |
9ff420f1 | 4470 | id.transform_lang_insert_block = NULL; |
1b369fae | 4471 | |
6de9cd9a DN |
4472 | /* Walk the tree once to find local labels. */ |
4473 | walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id); | |
4474 | ||
4475 | /* Walk the tree again, copying, remapping, and unsaving. */ | |
4476 | walk_tree (&expr, unsave_r, &id, NULL); | |
4477 | ||
4478 | /* Clean up. */ | |
6be42dd4 | 4479 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
4480 | if (id.debug_map) |
4481 | pointer_map_destroy (id.debug_map); | |
6de9cd9a DN |
4482 | |
4483 | return expr; | |
4484 | } | |
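/* A sketch of typical use: before emitting a GENERIC expression whose
   evaluation may be expanded more than once, take a private copy first:

     tree copy = unsave_expr_now (expr);

   COPY shares no local variables, labels or SAVE_EXPRs with EXPR, so the
   two can be expanded independently.  */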
4485 | ||
726a989a RB |
4486 | /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4487 | label, copies the declaration and enters it in the decl map in DATA (which
4488 | is really a 'copy_body_data *'). */
4489 | ||
4490 | static tree | |
4491 | mark_local_labels_stmt (gimple_stmt_iterator *gsip, | |
4492 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4493 | struct walk_stmt_info *wi) | |
4494 | { | |
4495 | copy_body_data *id = (copy_body_data *) wi->info; | |
4496 | gimple stmt = gsi_stmt (*gsip); | |
4497 | ||
4498 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
4499 | { | |
4500 | tree decl = gimple_label_label (stmt); | |
4501 | ||
4502 | /* Copy the decl and remember the copy. */ | |
4503 | insert_decl_map (id, decl, id->copy_decl (decl, id)); | |
4504 | } | |
4505 | ||
4506 | return NULL_TREE; | |
4507 | } | |
4508 | ||
4509 | ||
4510 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4511 | Using the pointer map pointed to by ST (the decl map of the enclosing
4512 | `copy_body_data'), remaps all local declarations to appropriate
4513 | replacements in gimple operands. */
4514 | ||
4515 | static tree | |
4516 | replace_locals_op (tree *tp, int *walk_subtrees, void *data) | |
4517 | { | |
4518 | struct walk_stmt_info *wi = (struct walk_stmt_info*) data; | |
4519 | copy_body_data *id = (copy_body_data *) wi->info; | |
4520 | struct pointer_map_t *st = id->decl_map; | |
4521 | tree *n; | |
4522 | tree expr = *tp; | |
4523 | ||
4524 | /* Only a local declaration (variable or label). */ | |
4525 | if ((TREE_CODE (expr) == VAR_DECL | |
4526 | && !TREE_STATIC (expr)) | |
4527 | || TREE_CODE (expr) == LABEL_DECL) | |
4528 | { | |
4529 | /* Lookup the declaration. */ | |
4530 | n = (tree *) pointer_map_contains (st, expr); | |
4531 | ||
4532 | /* If it's there, remap it. */ | |
4533 | if (n) | |
4534 | *tp = *n; | |
4535 | *walk_subtrees = 0; | |
4536 | } | |
4537 | else if (TREE_CODE (expr) == STATEMENT_LIST | |
4538 | || TREE_CODE (expr) == BIND_EXPR | |
4539 | || TREE_CODE (expr) == SAVE_EXPR) | |
4540 | gcc_unreachable (); | |
4541 | else if (TREE_CODE (expr) == TARGET_EXPR) | |
4542 | { | |
4543 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
4544 | It's OK for this to happen if it was part of a subtree that | |
4545 | isn't immediately expanded, such as operand 2 of another | |
4546 | TARGET_EXPR. */ | |
4547 | if (!TREE_OPERAND (expr, 1)) | |
4548 | { | |
4549 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
4550 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
4551 | } | |
4552 | } | |
4553 | ||
4554 | /* Keep iterating. */ | |
4555 | return NULL_TREE; | |
4556 | } | |
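/* The TARGET_EXPR case above mirrors unsave_expr_1: restoring operand 1
   from operand 3 makes the copied TARGET_EXPR expandable again on its
   own.  */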
4557 | ||
4558 | ||
4559 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4560 | Using the pointer map pointed to by ST (the decl map of the enclosing
4561 | `copy_body_data'), remaps all local declarations to appropriate
4562 | replacements in gimple statements. */
4563 | ||
4564 | static tree | |
4565 | replace_locals_stmt (gimple_stmt_iterator *gsip, | |
4566 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
4567 | struct walk_stmt_info *wi) | |
4568 | { | |
4569 | copy_body_data *id = (copy_body_data *) wi->info; | |
4570 | gimple stmt = gsi_stmt (*gsip); | |
4571 | ||
4572 | if (gimple_code (stmt) == GIMPLE_BIND) | |
4573 | { | |
4574 | tree block = gimple_bind_block (stmt); | |
4575 | ||
4576 | if (block) | |
4577 | { | |
4578 | remap_block (&block, id); | |
4579 | gimple_bind_set_block (stmt, block); | |
4580 | } | |
4581 | ||
4582 | /* This will remap a lot of the same decls again, but this should be | |
4583 | harmless. */ | |
4584 | if (gimple_bind_vars (stmt)) | |
526d73ab | 4585 | gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id)); |
726a989a RB |
4586 | } |
4587 | ||
4588 | /* Keep iterating. */ | |
4589 | return NULL_TREE; | |
4590 | } | |
4591 | ||
4592 | ||
4593 | /* Copies everything in SEQ and replaces variables and labels local to | |
4594 | current_function_decl. */ | |
4595 | ||
4596 | gimple_seq | |
4597 | copy_gimple_seq_and_replace_locals (gimple_seq seq) | |
4598 | { | |
4599 | copy_body_data id; | |
4600 | struct walk_stmt_info wi; | |
4601 | struct pointer_set_t *visited; | |
4602 | gimple_seq copy; | |
4603 | ||
4604 | /* There's nothing to do for NULL_TREE. */ | |
4605 | if (seq == NULL) | |
4606 | return seq; | |
4607 | ||
4608 | /* Set up ID. */ | |
4609 | memset (&id, 0, sizeof (id)); | |
4610 | id.src_fn = current_function_decl; | |
4611 | id.dst_fn = current_function_decl; | |
4612 | id.decl_map = pointer_map_create (); | |
b5b8b0ac | 4613 | id.debug_map = NULL; |
726a989a RB |
4614 | |
4615 | id.copy_decl = copy_decl_no_change; | |
4616 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4617 | id.transform_new_cfg = false; | |
4618 | id.transform_return_to_modify = false; | |
4619 | id.transform_lang_insert_block = NULL; | |
4620 | ||
4621 | /* Walk the tree once to find local labels. */ | |
4622 | memset (&wi, 0, sizeof (wi)); | |
4623 | visited = pointer_set_create (); | |
4624 | wi.info = &id; | |
4625 | wi.pset = visited; | |
4626 | walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi); | |
4627 | pointer_set_destroy (visited); | |
4628 | ||
4629 | copy = gimple_seq_copy (seq); | |
4630 | ||
4631 | /* Walk the copy, remapping decls. */ | |
4632 | memset (&wi, 0, sizeof (wi)); | |
4633 | wi.info = &id; | |
4634 | walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi); | |
4635 | ||
4636 | /* Clean up. */ | |
4637 | pointer_map_destroy (id.decl_map); | |
b5b8b0ac AO |
4638 | if (id.debug_map) |
4639 | pointer_map_destroy (id.debug_map); | |
726a989a RB |
4640 | |
4641 | return copy; | |
4642 | } | |
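/* A sketch of intended use (the caller is hypothetical): duplicating a
   statement sequence so that two variants can be lowered independently:

     gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);

   COPY can then be edited freely; its labels, local variables and BLOCKs
   have been remapped, so SEQ is left untouched.  */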
4643 | ||
4644 | ||
6de9cd9a | 4645 | /* Allow someone to determine if SEARCH is a child of TOP from gdb. */ |
aa4a53af | 4646 | |
6de9cd9a DN |
4647 | static tree |
4648 | debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data) | |
4649 | { | |
4650 | if (*tp == data) | |
4651 | return (tree) data; | |
4652 | else | |
4653 | return NULL; | |
4654 | } | |
4655 | ||
24e47c76 | 4656 | DEBUG_FUNCTION bool |
6de9cd9a DN |
4657 | debug_find_tree (tree top, tree search) |
4658 | { | |
4659 | return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0; | |
4660 | } | |
4661 | ||
e21aff8a | 4662 | |
6de9cd9a DN |
4663 | /* Declare the variables created by the inliner. Add all the variables in
4664 | VARS to BLOCK. */
4665 | ||
4666 | static void | |
e21aff8a | 4667 | declare_inline_vars (tree block, tree vars) |
6de9cd9a | 4668 | { |
84936f6f | 4669 | tree t; |
910ad8de | 4670 | for (t = vars; t; t = DECL_CHAIN (t)) |
9659ce8b JH |
4671 | { |
4672 | DECL_SEEN_IN_BIND_EXPR_P (t) = 1; | |
4673 | gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t)); | |
c021f10b | 4674 | add_local_decl (cfun, t); |
9659ce8b | 4675 | } |
6de9cd9a | 4676 | |
e21aff8a SB |
4677 | if (block) |
4678 | BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars); | |
4679 | } | |
4680 | ||
19734dd8 | 4681 | /* Finish up COPY, which is a copy of the decl DECL. The original DECL
1b369fae RH |
4682 | was in ID->src_fn, but the copy will live in ID->dst_fn; set up its
4683 | debug info, abstract origin, context and TREE_USED accordingly. */
19734dd8 | 4684 | |
1b369fae RH |
4685 | static tree |
4686 | copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy) | |
19734dd8 | 4687 | { |
19734dd8 RL |
4688 | /* Don't generate debug information for the copy if we wouldn't have
4689 | generated it for the original either. */
4690 | DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl); | |
4691 | DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl); | |
4692 | ||
4693 | /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what | |
b8698a0f | 4694 | declaration inspired this copy. */ |
19734dd8 RL |
4695 | DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl); |
4696 | ||
4697 | /* The new variable/label has no RTL, yet. */ | |
68a976f2 RL |
4698 | if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL) |
4699 | && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy)) | |
2eb79bbb | 4700 | SET_DECL_RTL (copy, 0); |
b8698a0f | 4701 | |
19734dd8 RL |
4702 | /* These args would always appear unused, if not for this. */ |
4703 | TREE_USED (copy) = 1; | |
4704 | ||
4705 | /* Set the context for the new declaration. */ | |
4706 | if (!DECL_CONTEXT (decl)) | |
4707 | /* Globals stay global. */ | |
4708 | ; | |
1b369fae | 4709 | else if (DECL_CONTEXT (decl) != id->src_fn) |
19734dd8 RL |
4710 | /* Things that weren't in the scope of the function we're inlining |
4711 | from aren't in the scope we're inlining to, either. */ | |
4712 | ; | |
4713 | else if (TREE_STATIC (decl)) | |
4714 | /* Function-scoped static variables should stay in the original | |
4715 | function. */ | |
4716 | ; | |
4717 | else | |
4718 | /* Ordinary automatic local variables are now in the scope of the | |
4719 | new function. */ | |
1b369fae | 4720 | DECL_CONTEXT (copy) = id->dst_fn; |
19734dd8 RL |
4721 | |
4722 | return copy; | |
4723 | } | |
4724 | ||
1b369fae RH |
4725 | static tree |
4726 | copy_decl_to_var (tree decl, copy_body_data *id) | |
4727 | { | |
4728 | tree copy, type; | |
4729 | ||
4730 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4731 | || TREE_CODE (decl) == RESULT_DECL); | |
4732 | ||
4733 | type = TREE_TYPE (decl); | |
4734 | ||
c2255bc4 AH |
4735 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4736 | VAR_DECL, DECL_NAME (decl), type); | |
25a6a873 RG |
4737 | if (DECL_PT_UID_SET_P (decl)) |
4738 | SET_DECL_PT_UID (copy, DECL_PT_UID (decl)); | |
1b369fae RH |
4739 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); |
4740 | TREE_READONLY (copy) = TREE_READONLY (decl); | |
4741 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
0890b981 | 4742 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
1b369fae RH |
4743 | |
4744 | return copy_decl_for_dup_finish (id, decl, copy); | |
4745 | } | |
4746 | ||
c08cd4c1 JM |
4747 | /* Like copy_decl_to_var, but create a return slot object instead of a |
4748 | pointer variable for return by invisible reference. */ | |
4749 | ||
4750 | static tree | |
4751 | copy_result_decl_to_var (tree decl, copy_body_data *id) | |
4752 | { | |
4753 | tree copy, type; | |
4754 | ||
4755 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4756 | || TREE_CODE (decl) == RESULT_DECL); | |
4757 | ||
4758 | type = TREE_TYPE (decl); | |
4759 | if (DECL_BY_REFERENCE (decl)) | |
4760 | type = TREE_TYPE (type); | |
4761 | ||
c2255bc4 AH |
4762 | copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn), |
4763 | VAR_DECL, DECL_NAME (decl), type); | |
25a6a873 RG |
4764 | if (DECL_PT_UID_SET_P (decl)) |
4765 | SET_DECL_PT_UID (copy, DECL_PT_UID (decl)); | |
c08cd4c1 JM |
4766 | TREE_READONLY (copy) = TREE_READONLY (decl); |
4767 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
4768 | if (!DECL_BY_REFERENCE (decl)) | |
4769 | { | |
4770 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); | |
0890b981 | 4771 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
c08cd4c1 JM |
4772 | } |
4773 | ||
4774 | return copy_decl_for_dup_finish (id, decl, copy); | |
4775 | } | |
4776 | ||
9ff420f1 | 4777 | tree |
1b369fae RH |
4778 | copy_decl_no_change (tree decl, copy_body_data *id) |
4779 | { | |
4780 | tree copy; | |
4781 | ||
4782 | copy = copy_node (decl); | |
4783 | ||
4784 | /* The COPY is not abstract; it will be generated in DST_FN. */ | |
4785 | DECL_ABSTRACT (copy) = 0; | |
4786 | lang_hooks.dup_lang_specific_decl (copy); | |
4787 | ||
4788 | /* TREE_ADDRESSABLE isn't used to indicate that a label's address has | |
4789 | been taken; it's for internal bookkeeping in expand_goto_internal. */ | |
4790 | if (TREE_CODE (copy) == LABEL_DECL) | |
4791 | { | |
4792 | TREE_ADDRESSABLE (copy) = 0; | |
4793 | LABEL_DECL_UID (copy) = -1; | |
4794 | } | |
4795 | ||
4796 | return copy_decl_for_dup_finish (id, decl, copy); | |
4797 | } | |
4798 | ||
4799 | static tree | |
4800 | copy_decl_maybe_to_var (tree decl, copy_body_data *id) | |
4801 | { | |
4802 | if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL) | |
4803 | return copy_decl_to_var (decl, id); | |
4804 | else | |
4805 | return copy_decl_no_change (decl, id); | |
4806 | } | |
4807 | ||
19734dd8 RL |
4808 | /* Return a copy of the function's argument tree. */ |
4809 | static tree | |
c6f7cfc1 JH |
4810 | copy_arguments_for_versioning (tree orig_parm, copy_body_data * id, |
4811 | bitmap args_to_skip, tree *vars) | |
19734dd8 | 4812 | { |
c6f7cfc1 JH |
4813 | tree arg, *parg; |
4814 | tree new_parm = NULL; | |
4815 | int i = 0; | |
19734dd8 | 4816 | |
c6f7cfc1 JH |
4817 | parg = &new_parm; |
4818 | ||
910ad8de | 4819 | for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++) |
c6f7cfc1 JH |
4820 | if (!args_to_skip || !bitmap_bit_p (args_to_skip, i)) |
4821 | { | |
4822 | tree new_tree = remap_decl (arg, id); | |
4823 | lang_hooks.dup_lang_specific_decl (new_tree); | |
4824 | *parg = new_tree; | |
910ad8de | 4825 | parg = &DECL_CHAIN (new_tree); |
c6f7cfc1 | 4826 | } |
eb50f5f4 | 4827 | else if (!pointer_map_contains (id->decl_map, arg)) |
c6f7cfc1 JH |
4828 | { |
4829 | /* Make an equivalent VAR_DECL. If the argument was used
4830 | as a temporary variable later in the function, the uses will be
4831 | replaced by this local variable. */
4832 | tree var = copy_decl_to_var (arg, id); | |
4833 | get_var_ann (var); | |
4834 | add_referenced_var (var); | |
4835 | insert_decl_map (id, arg, var); | |
4836 | /* Declare this new variable. */ | |
910ad8de | 4837 | DECL_CHAIN (var) = *vars; |
c6f7cfc1 JH |
4838 | *vars = var; |
4839 | } | |
4840 | return new_parm; | |
19734dd8 RL |
4841 | } |
4842 | ||
4843 | /* Return a copy of the function's static chain. */ | |
4844 | static tree | |
1b369fae | 4845 | copy_static_chain (tree static_chain, copy_body_data * id) |
19734dd8 RL |
4846 | { |
4847 | tree *chain_copy, *pvar; | |
4848 | ||
4849 | chain_copy = &static_chain; | |
910ad8de | 4850 | for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar)) |
19734dd8 | 4851 | { |
82d6e6fc KG |
4852 | tree new_tree = remap_decl (*pvar, id); |
4853 | lang_hooks.dup_lang_specific_decl (new_tree); | |
910ad8de | 4854 | DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar); |
82d6e6fc | 4855 | *pvar = new_tree; |
19734dd8 RL |
4856 | } |
4857 | return static_chain; | |
4858 | } | |
4859 | ||
4860 | /* Return true if the function is allowed to be versioned. | |
4861 | This is a guard for the versioning functionality. */ | |
27dbd3ac | 4862 | |
19734dd8 RL |
4863 | bool |
4864 | tree_versionable_function_p (tree fndecl) | |
4865 | { | |
86631ea3 MJ |
4866 | return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl)) |
4867 | && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL); | |
19734dd8 RL |
4868 | } |
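/* A sketch of how a caller might guard cloning with this predicate
   (the argument values are illustrative only):

     if (tree_versionable_function_p (old_decl))
       tree_function_versioning (old_decl, new_decl, NULL, false,
                                 NULL, NULL, NULL);  */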
4869 | ||
9187e02d JH |
4870 | /* Delete all unreachable basic blocks and update the callgraph.
4871 | Doing so is somewhat nontrivial because we need to update all clones and
4872 | remove inline functions that become unreachable. */
9f5e9983 | 4873 | |
9187e02d JH |
4874 | static bool |
4875 | delete_unreachable_blocks_update_callgraph (copy_body_data *id) | |
9f5e9983 | 4876 | { |
9187e02d JH |
4877 | bool changed = false; |
4878 | basic_block b, next_bb; | |
4879 | ||
4880 | find_unreachable_blocks (); | |
4881 | ||
4882 | /* Delete all unreachable basic blocks. */ | |
4883 | ||
4884 | for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb) | |
4885 | { | |
4886 | next_bb = b->next_bb; | |
4887 | ||
4888 | if (!(b->flags & BB_REACHABLE)) | |
4889 | { | |
4890 | gimple_stmt_iterator bsi; | |
4891 | ||
4892 | for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi)) | |
4893 | if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL) | |
4894 | { | |
4895 | struct cgraph_edge *e; | |
4896 | struct cgraph_node *node; | |
4897 | ||
4898 | if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL) | |
4899 | { | |
4900 | if (!e->inline_failed) | |
4901 | cgraph_remove_node_and_inline_clones (e->callee); | |
4902 | else | |
4903 | cgraph_remove_edge (e); | |
4904 | } | |
4905 | if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES | |
4906 | && id->dst_node->clones) | |
4907 | for (node = id->dst_node->clones; node != id->dst_node;) | |
4908 | { | |
4909 | if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL) | |
4910 | { | |
4911 | if (!e->inline_failed) | |
4912 | cgraph_remove_node_and_inline_clones (e->callee); | |
4913 | else | |
4914 | cgraph_remove_edge (e); | |
4915 | } | |
b8698a0f | 4916 | |
9187e02d JH |
4917 | if (node->clones) |
4918 | node = node->clones; | |
4919 | else if (node->next_sibling_clone) | |
4920 | node = node->next_sibling_clone; | |
4921 | else | |
4922 | { | |
4923 | while (node != id->dst_node && !node->next_sibling_clone) | |
4924 | node = node->clone_of; | |
4925 | if (node != id->dst_node) | |
4926 | node = node->next_sibling_clone; | |
4927 | } | |
4928 | } | |
4929 | } | |
4930 | delete_basic_block (b); | |
4931 | changed = true; | |
4932 | } | |
4933 | } | |
4934 | ||
4935 | if (changed) | |
4936 | tidy_fallthru_edges (); | |
9187e02d | 4937 | return changed; |
9f5e9983 JJ |
4938 | } |
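/* The clone walk above is a preorder traversal of the clone tree:
   descend into NODE->clones first, then advance through
   NODE->next_sibling_clone, climbing back via NODE->clone_of once a
   subtree is exhausted.  update_clone_info below uses the same walk.  */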
4939 | ||
08ad1d6d JH |
4940 | /* Update clone info after duplication. */ |
4941 | ||
4942 | static void | |
4943 | update_clone_info (copy_body_data * id) | |
4944 | { | |
4945 | struct cgraph_node *node; | |
4946 | if (!id->dst_node->clones) | |
4947 | return; | |
4948 | for (node = id->dst_node->clones; node != id->dst_node;) | |
4949 | { | |
4950 | /* First update replace maps to match the new body. */ | |
4951 | if (node->clone.tree_map) | |
4952 | { | |
4953 | unsigned int i; | |
4954 | for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++) | |
4955 | { | |
4956 | struct ipa_replace_map *replace_info; | |
4957 | replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i); | |
4958 | walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL); | |
4959 | walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL); | |
4960 | } | |
4961 | } | |
4962 | if (node->clones) | |
4963 | node = node->clones; | |
4964 | else if (node->next_sibling_clone) | |
4965 | node = node->next_sibling_clone; | |
4966 | else | |
4967 | { | |
4968 | while (node != id->dst_node && !node->next_sibling_clone) | |
4969 | node = node->clone_of; | |
4970 | if (node != id->dst_node) | |
4971 | node = node->next_sibling_clone; | |
4972 | } | |
4973 | } | |
4974 | } | |
4975 | ||
19734dd8 RL |
4976 | /* Create a copy of a function's tree. |
4977 | OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes | |
4978 | of the original function and the new copied function | |
b8698a0f L |
4979 | respectively. In case we want to replace a DECL |
4980 | tree with another tree while duplicating the function's | |
4981 | body, TREE_MAP represents the mapping between these | |
ea99e0be | 4982 | trees. If UPDATE_CLONES is set, the call_stmt fields |
91382288 JH |
4983 | of edges of clones of the function will be updated. |
4984 | ||
4985 | If non-NULL, ARGS_TO_SKIP determines which function parameters to
4986 | remove from the new version.
4987 | If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4988 | If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4989 | */ | |
19734dd8 | 4990 | void |
27dbd3ac RH |
4991 | tree_function_versioning (tree old_decl, tree new_decl, |
4992 | VEC(ipa_replace_map_p,gc)* tree_map, | |
91382288 JH |
4993 | bool update_clones, bitmap args_to_skip, |
4994 | bitmap blocks_to_copy, basic_block new_entry) | |
19734dd8 RL |
4995 | { |
4996 | struct cgraph_node *old_version_node; | |
4997 | struct cgraph_node *new_version_node; | |
1b369fae | 4998 | copy_body_data id; |
110cfe1c | 4999 | tree p; |
19734dd8 RL |
5000 | unsigned i; |
5001 | struct ipa_replace_map *replace_info; | |
b5b8b0ac | 5002 | basic_block old_entry_block, bb; |
0f1961a2 JH |
5003 | VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10); |
5004 | ||
873aa8f5 | 5005 | tree old_current_function_decl = current_function_decl; |
0f1961a2 | 5006 | tree vars = NULL_TREE; |
19734dd8 RL |
5007 | |
5008 | gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL | |
5009 | && TREE_CODE (new_decl) == FUNCTION_DECL); | |
5010 | DECL_POSSIBLY_INLINED (old_decl) = 1; | |
5011 | ||
5012 | old_version_node = cgraph_node (old_decl); | |
5013 | new_version_node = cgraph_node (new_decl); | |
5014 | ||
a3aadcc5 JH |
5015 | /* Output the inlining info for this abstract function, since it has been |
5016 | inlined. If we don't do this now, we can lose the information about the | |
5017 | variables in the function when the blocks get blown away as soon as we | |
5018 | remove the cgraph node. */ | |
5019 | (*debug_hooks->outlining_inline_function) (old_decl); | |
5020 | ||
19734dd8 RL |
5021 | DECL_ARTIFICIAL (new_decl) = 1; |
5022 | DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl); | |
f9417da1 | 5023 | DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl); |
19734dd8 | 5024 | |
3d283195 JH |
5025 | /* Prepare the data structures for the tree copy. */ |
5026 | memset (&id, 0, sizeof (id)); | |
5027 | ||
19734dd8 | 5028 | /* Generate a new name for the new version. */ |
9187e02d | 5029 | id.statements_to_fold = pointer_set_create (); |
b5b8b0ac | 5030 | |
6be42dd4 | 5031 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 5032 | id.debug_map = NULL; |
1b369fae RH |
5033 | id.src_fn = old_decl; |
5034 | id.dst_fn = new_decl; | |
5035 | id.src_node = old_version_node; | |
5036 | id.dst_node = new_version_node; | |
5037 | id.src_cfun = DECL_STRUCT_FUNCTION (old_decl); | |
0e3776db JH |
5038 | if (id.src_node->ipa_transforms_to_apply) |
5039 | { | |
5040 | VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply; | |
5041 | unsigned int i; | |
5042 | ||
5043 | id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap, | |
5044 | id.src_node->ipa_transforms_to_apply); | |
5045 | for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++) | |
5046 | VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply, | |
5047 | VEC_index (ipa_opt_pass, | |
5048 | old_transforms_to_apply, | |
5049 | i)); | |
5050 | } | |
b8698a0f | 5051 | |
1b369fae RH |
5052 | id.copy_decl = copy_decl_no_change; |
5053 | id.transform_call_graph_edges | |
5054 | = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE; | |
5055 | id.transform_new_cfg = true; | |
5056 | id.transform_return_to_modify = false; | |
9ff420f1 | 5057 | id.transform_lang_insert_block = NULL; |
1b369fae | 5058 | |
19734dd8 | 5059 | current_function_decl = new_decl; |
110cfe1c JH |
5060 | old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION |
5061 | (DECL_STRUCT_FUNCTION (old_decl)); | |
5062 | initialize_cfun (new_decl, old_decl, | |
0d63a740 | 5063 | old_entry_block->count); |
1755aad0 RG |
5064 | DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta |
5065 | = id.src_cfun->gimple_df->ipa_pta; | |
110cfe1c | 5066 | push_cfun (DECL_STRUCT_FUNCTION (new_decl)); |
b8698a0f | 5067 | |
19734dd8 RL |
5068 | /* Copy the function's static chain. */ |
5069 | p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl; | |
5070 | if (p) | |
5071 | DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl = | |
5072 | copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl, | |
5073 | &id); | |
b8698a0f | 5074 | |
19734dd8 RL |
5075 | /* If there's a tree_map, prepare for substitution. */ |
5076 | if (tree_map) | |
9187e02d | 5077 | for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++) |
19734dd8 | 5078 | { |
0f1961a2 | 5079 | gimple init; |
9187e02d | 5080 | replace_info = VEC_index (ipa_replace_map_p, tree_map, i); |
1b369fae | 5081 | if (replace_info->replace_p) |
00fc2333 | 5082 | { |
657c0925 | 5083 | tree op = replace_info->new_tree; |
922f15c2 JH |
5084 | if (!replace_info->old_tree) |
5085 | { | |
5086 | int i = replace_info->parm_num; | |
5087 | tree parm; | |
910ad8de | 5088 | for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm)) |
922f15c2 JH |
5089 | i --; |
5090 | replace_info->old_tree = parm; | |
5091 | } | |
5092 | ||
657c0925 JH |
5093 | |
5094 | STRIP_NOPS (op); | |
5095 | ||
5096 | if (TREE_CODE (op) == VIEW_CONVERT_EXPR) | |
5097 | op = TREE_OPERAND (op, 0); | |
b8698a0f | 5098 | |
657c0925 | 5099 | if (TREE_CODE (op) == ADDR_EXPR) |
00fc2333 | 5100 | { |
657c0925 | 5101 | op = TREE_OPERAND (op, 0); |
00fc2333 JH |
5102 | while (handled_component_p (op)) |
5103 | op = TREE_OPERAND (op, 0); | |
5104 | if (TREE_CODE (op) == VAR_DECL) | |
5105 | add_referenced_var (op); | |
5106 | } | |
0f1961a2 JH |
5107 | gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL); |
5108 | init = setup_one_parameter (&id, replace_info->old_tree, | |
5109 | replace_info->new_tree, id.src_fn, | |
5110 | NULL, | |
5111 | &vars); | |
5112 | if (init) | |
5113 | VEC_safe_push (gimple, heap, init_stmts, init); | |
00fc2333 | 5114 | } |
19734dd8 | 5115 | } |
eb50f5f4 JH |
5116 | /* Copy the function's arguments. */ |
5117 | if (DECL_ARGUMENTS (old_decl) != NULL_TREE) | |
5118 | DECL_ARGUMENTS (new_decl) = | |
5119 | copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id, | |
5120 | args_to_skip, &vars); | |
b8698a0f | 5121 | |
eb50f5f4 | 5122 | DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id); |
b8698a0f | 5123 | |
0f1961a2 | 5124 | declare_inline_vars (DECL_INITIAL (new_decl), vars); |
9187e02d | 5125 | |
c021f10b | 5126 | if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls)) |
19734dd8 | 5127 | /* Add local vars. */ |
c021f10b | 5128 | add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false); |
b8698a0f | 5129 | |
19734dd8 | 5130 | /* Copy the Function's body. */ |
0d63a740 | 5131 | copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE, |
91382288 | 5132 | ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry); |
b8698a0f | 5133 | |
19734dd8 RL |
5134 | if (DECL_RESULT (old_decl) != NULL_TREE) |
5135 | { | |
5136 | tree *res_decl = &DECL_RESULT (old_decl); | |
5137 | DECL_RESULT (new_decl) = remap_decl (*res_decl, &id); | |
5138 | lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl)); | |
5139 | } | |
b8698a0f | 5140 | |
19734dd8 RL |
5141 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ |
5142 | number_blocks (new_decl); | |
5143 | ||
b5b8b0ac AO |
5144 | /* We want to create the BB unconditionally, so that the addition of |
5145 | debug stmts doesn't affect BB count, which may in the end cause | |
5146 | codegen differences. */ | |
5147 | bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR)); | |
5148 | while (VEC_length (gimple, init_stmts)) | |
5149 | insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts)); | |
08ad1d6d | 5150 | update_clone_info (&id); |
0f1961a2 | 5151 | |
27dbd3ac RH |
5152 | /* Remap the nonlocal_goto_save_area, if any. */ |
5153 | if (cfun->nonlocal_goto_save_area) | |
5154 | { | |
5155 | struct walk_stmt_info wi; | |
5156 | ||
5157 | memset (&wi, 0, sizeof (wi)); | |
5158 | wi.info = &id; | |
5159 | walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL); | |
5160 | } | |
5161 | ||
19734dd8 | 5162 | /* Clean up. */ |
6be42dd4 | 5163 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
5164 | if (id.debug_map) |
5165 | pointer_map_destroy (id.debug_map); | |
5006671f RG |
5166 | free_dominance_info (CDI_DOMINATORS); |
5167 | free_dominance_info (CDI_POST_DOMINATORS); | |
9187e02d JH |
5168 | |
5169 | fold_marked_statements (0, id.statements_to_fold); | |
5170 | pointer_set_destroy (id.statements_to_fold); | |
5171 | fold_cond_expr_cond (); | |
5172 | delete_unreachable_blocks_update_callgraph (&id); | |
99b766fc JH |
5173 | if (id.dst_node->analyzed) |
5174 | cgraph_rebuild_references (); | |
9187e02d | 5175 | update_ssa (TODO_update_ssa); |
b35366ce JH |
5176 | |
5177 | /* After partial cloning we need to rescale frequencies, so they are | |
5178 | within proper range in the cloned function. */ | |
5179 | if (new_entry) | |
5180 | { | |
5181 | struct cgraph_edge *e; | |
5182 | rebuild_frequencies (); | |
5183 | ||
5184 | new_version_node->count = ENTRY_BLOCK_PTR->count; | |
5185 | for (e = new_version_node->callees; e; e = e->next_callee) | |
5186 | { | |
5187 | basic_block bb = gimple_bb (e->call_stmt); | |
02ec6988 MJ |
5188 | e->frequency = compute_call_stmt_bb_frequency (current_function_decl, |
5189 | bb); | |
5190 | e->count = bb->count; | |
5191 | } | |
5192 | for (e = new_version_node->indirect_calls; e; e = e->next_callee) | |
5193 | { | |
5194 | basic_block bb = gimple_bb (e->call_stmt); | |
5195 | e->frequency = compute_call_stmt_bb_frequency (current_function_decl, | |
5196 | bb); | |
b35366ce JH |
5197 | e->count = bb->count; |
5198 | } | |
5199 | } | |
5200 | ||
9187e02d JH |
5201 | free_dominance_info (CDI_DOMINATORS); |
5202 | free_dominance_info (CDI_POST_DOMINATORS); | |
5203 | ||
b5b8b0ac | 5204 | gcc_assert (!id.debug_stmts); |
0f1961a2 | 5205 | VEC_free (gimple, heap, init_stmts); |
110cfe1c | 5206 | pop_cfun (); |
873aa8f5 JH |
5207 | current_function_decl = old_current_function_decl; |
5208 | gcc_assert (!current_function_decl | |
5209 | || DECL_STRUCT_FUNCTION (current_function_decl) == cfun); | |
19734dd8 RL |
5210 | return; |
5211 | } | |
5212 | ||
f82a627c EB |
5213 | /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5214 | the callee and return the inlined body on success. */ | |
5215 | ||
5216 | tree | |
5217 | maybe_inline_call_in_expr (tree exp) | |
5218 | { | |
5219 | tree fn = get_callee_fndecl (exp); | |
5220 | ||
5221 | /* We can only try to inline "const" functions. */ | |
5222 | if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn)) | |
5223 | { | |
5224 | struct pointer_map_t *decl_map = pointer_map_create (); | |
5225 | call_expr_arg_iterator iter; | |
5226 | copy_body_data id; | |
5227 | tree param, arg, t; | |
5228 | ||
5229 | /* Remap the parameters. */ | |
5230 | for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter); | |
5231 | param; | |
910ad8de | 5232 | param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter)) |
f82a627c EB |
5233 | *pointer_map_insert (decl_map, param) = arg; |
5234 | ||
5235 | memset (&id, 0, sizeof (id)); | |
5236 | id.src_fn = fn; | |
5237 | id.dst_fn = current_function_decl; | |
5238 | id.src_cfun = DECL_STRUCT_FUNCTION (fn); | |
5239 | id.decl_map = decl_map; | |
5240 | ||
5241 | id.copy_decl = copy_decl_no_change; | |
5242 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
5243 | id.transform_new_cfg = false; | |
5244 | id.transform_return_to_modify = true; | |
5245 | id.transform_lang_insert_block = false; | |
5246 | ||
5247 | /* Make sure not to unshare trees behind the front-end's back | |
5248 | since front-end specific mechanisms may rely on sharing. */ | |
5249 | id.regimplify = false; | |
5250 | id.do_not_unshare = true; | |
5251 | ||
5252 | /* We're not inside any EH region. */ | |
1d65f45c | 5253 | id.eh_lp_nr = 0; |
f82a627c EB |
5254 | |
5255 | t = copy_tree_body (&id); | |
5256 | pointer_map_destroy (decl_map); | |
5257 | ||
5258 | /* We can only return something suitable for use in a GENERIC | |
5259 | expression tree. */ | |
5260 | if (TREE_CODE (t) == MODIFY_EXPR) | |
5261 | return TREE_OPERAND (t, 1); | |
5262 | } | |
5263 | ||
5264 | return NULL_TREE; | |
5265 | } | |
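/* For example (illustrative only): given a "const" callee such as
     static int one (void) { return 1; }
   a GENERIC call "one ()" can be replaced by the RHS of the MODIFY_EXPR
   produced from the copied body, i.e. the constant 1; callees with side
   effects never pass the TREE_READONLY check above.  */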
5266 | ||
52dd234b RH |
5267 | /* Duplicate a type, fields and all. */ |
5268 | ||
5269 | tree | |
5270 | build_duplicate_type (tree type) | |
5271 | { | |
1b369fae | 5272 | struct copy_body_data id; |
52dd234b RH |
5273 | |
5274 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
5275 | id.src_fn = current_function_decl; |
5276 | id.dst_fn = current_function_decl; | |
5277 | id.src_cfun = cfun; | |
6be42dd4 | 5278 | id.decl_map = pointer_map_create (); |
b5b8b0ac | 5279 | id.debug_map = NULL; |
4009f2e7 | 5280 | id.copy_decl = copy_decl_no_change; |
52dd234b RH |
5281 | |
5282 | type = remap_type_1 (type, &id); | |
5283 | ||
6be42dd4 | 5284 | pointer_map_destroy (id.decl_map); |
b5b8b0ac AO |
5285 | if (id.debug_map) |
5286 | pointer_map_destroy (id.debug_map); | |
52dd234b | 5287 | |
f31c9f09 DG |
5288 | TYPE_CANONICAL (type) = type; |
5289 | ||
52dd234b RH |
5290 | return type; |
5291 | } | |
ab442df7 MM |
5292 | |
5293 | /* Return whether it is safe to inline the callee of call edge E. Inlining
6eb29714 XDL |
5294 | is unsafe when caller and callee use different target-specific options, or
5295 | when the call site's actual argument types mismatch the parameter types. */
ab442df7 | 5296 | bool |
6eb29714 | 5297 | tree_can_inline_p (struct cgraph_edge *e) |
ab442df7 | 5298 | { |
5779e713 MM |
5299 | #if 0 |
5300 | /* This causes a regression in SPEC in that it prevents a cold function from | |
5301 | inlining a hot function. Perhaps this should only apply to functions | |
5302 | that the user declares hot/cold/optimize explicitly. */ | |
5303 | ||
ab442df7 MM |
5304 | /* Don't inline a function with a higher optimization level than the |
5305 | caller, or with different space constraints (hot/cold functions). */ | |
5306 | tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller); | |
5307 | tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee); | |
5308 | ||
5309 | if (caller_tree != callee_tree) | |
5310 | { | |
5311 | struct cl_optimization *caller_opt | |
5312 | = TREE_OPTIMIZATION ((caller_tree) | |
5313 | ? caller_tree | |
5314 | : optimization_default_node); | |
5315 | ||
5316 | struct cl_optimization *callee_opt | |
5317 | = TREE_OPTIMIZATION ((callee_tree) | |
5318 | ? callee_tree | |
5319 | : optimization_default_node); | |
5320 | ||
5321 | if ((caller_opt->optimize > callee_opt->optimize) | |
5322 | || (caller_opt->optimize_size != callee_opt->optimize_size)) | |
5323 | return false; | |
5324 | } | |
5779e713 | 5325 | #endif |
8fd8a06f | 5326 | tree caller, callee, lhs; |
6eb29714 XDL |
5327 | |
5328 | caller = e->caller->decl; | |
5329 | callee = e->callee->decl; | |
ab442df7 | 5330 | |
8f4f502f EB |
5331 | /* First check that inlining isn't simply forbidden in this case. */ |
5332 | if (inline_forbidden_into_p (caller, callee)) | |
f9417da1 RG |
5333 | { |
5334 | e->inline_failed = CIF_UNSPECIFIED; | |
5335 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
5336 | return false; | |
5337 | } | |
5338 | ||
ab442df7 | 5339 | /* Allow the backend to decide if inlining is ok. */ |
6eb29714 XDL |
5340 | if (!targetm.target_option.can_inline_p (caller, callee)) |
5341 | { | |
5342 | e->inline_failed = CIF_TARGET_OPTION_MISMATCH; | |
5343 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
d7f09764 | 5344 | e->call_stmt_cannot_inline_p = true; |
6eb29714 XDL |
5345 | return false; |
5346 | } | |
5347 | ||
8fd8a06f RG |
5348 | /* Do not inline calls where we cannot trivially work around mismatches
5349 | in argument or return types. */ | |
d7f09764 | 5350 | if (e->call_stmt |
8fd8a06f RG |
5351 | && ((DECL_RESULT (callee) |
5352 | && !DECL_BY_REFERENCE (DECL_RESULT (callee)) | |
5353 | && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE | |
5354 | && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)), | |
5355 | TREE_TYPE (lhs)) | |
5356 | && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs)) | |
5357 | || !gimple_check_call_args (e->call_stmt))) | |
6eb29714 XDL |
5358 | { |
5359 | e->inline_failed = CIF_MISMATCHED_ARGUMENTS; | |
5360 | gimple_call_set_cannot_inline (e->call_stmt, true); | |
d7f09764 | 5361 | e->call_stmt_cannot_inline_p = true; |
6eb29714 XDL |
5362 | return false; |
5363 | } | |
5364 | ||
5365 | return true; | |
ab442df7 | 5366 | } |