/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

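/* As a small illustration of the inlining case above (the concrete
   names x.1 and retval.2 below are made up for exposition, they are
   not produced by this file), given

       int sq (int x) { return x * x; }
       int f (int a)  { return sq (a + 1); }

   duplicating the body of sq into f conceptually produces

       int f (int a)
       {
         int x.1 = a + 1;            // PARM_DECL remapped to a VAR_DECL
         int retval.2 = x.1 * x.1;   // RETURN_EXPR became a MODIFY_EXPR
         return retval.2;            // the dedicated returned-value variable
       }

   with the return's branch semantics handled by CFG edges rather than
   by an explicit statement.  */
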
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

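/* For illustration (hypothetical decls, not actual code): after

       insert_decl_map (id, old_parm, new_var);

   the map contains both OLD_PARM -> NEW_VAR and NEW_VAR -> NEW_VAR,
   so if the copying walk later encounters NEW_VAR itself, the lookup
   yields NEW_VAR again instead of duplicating it a second time.  */
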
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by variable during
     inlining:  this saves us from the need to introduce a PHI node in
     the case the return value is just partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable,
             we might extend its lifetime (the variable might get
             reused).  This causes an ICE in the case we end up
             extending the lifetime of an SSA name across an abnormal
             edge, and also increases register pressure.

             We simply initialize all uninitialized vars by 0 except
             for the case we are inlining to the very first BB.  We can
             avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but this is expensive to
             test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME
                  && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

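/* Only variably modified types get new copies here.  For example
   (illustrative only, not from the original sources), when copying

       void g (int n) { int buf[n]; ... }

   the type of BUF has a TYPE_SIZE that refers to g's local N, so it
   must be remapped to refer to the copy of N in the destination
   function, whereas a plain type like "int" maps to itself via the
   identity entry inserted above.  */
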
/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We can not duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

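/* Illustration (example of ours, not from the original sources): in

       int f (void) { static int counter; int x; ... }

   COUNTER is a VAR_DECL that is not an auto variable of f, so
   can_be_nonlocal returns true and the single static declaration is
   shared by every copy of the body, whereas X must be remapped to a
   fresh local in each copy.  */
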
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if (debug_info_level > DINFO_LEVEL_TERSE
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if (debug_info_level > DINFO_LEVEL_TERSE
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

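/* A sketch of the *& elimination done above (names are illustrative):
   if the caller passes &v for a pointer parameter p, the decl map
   contains p -> &v, and a use of *p in the inlined body would
   otherwise be copied as *&v.  The INDIRECT_REF handling folds that
   back to plain v, so no dereference of a temporary address survives
   in the copied IL.  */
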
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block appear in the block
         of function call.  */
      if (EXPR_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst (NULL_TREE,
                         id->eh_region_offset
                         + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

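/* Note how copy_tree_body_r is meant to be driven by walk_tree; a
   minimal sketch of a caller (assuming ID has already been set up
   with src_fn, dst_fn and a fresh decl_map) would be

       walk_tree (&body, copy_tree_body_r, id, NULL);

   after which BODY and every tree reachable from it refer only to
   remapped declarations, types and blocks.  */
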
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  /* We have to handle EH region remapping of GIMPLE_RESX specially because
     the region number is not an operand.  */
  if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
    {
      gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
    }
  return copy;
}

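/* A sketch of the GIMPLE_RETURN rewrite performed above (SSA names
   invented for exposition): with id->transform_return_to_modify set
   and id->retvar being the caller-side result variable D.2, a body
   ending in

       return x_1;

   is remapped to

       D.2 = x_1;

   while a value-less return becomes a gimple nop, which copy_bb
   below simply skips.  */
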
e21aff8a SB |
1318 | /* Copy basic block, scale profile accordingly. Edges will be taken care of |
1319 | later */ | |
1320 | ||
1321 | static basic_block | |
0178d644 VR |
1322 | copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, |
1323 | gcov_type count_scale) | |
e21aff8a | 1324 | { |
c2a4718a | 1325 | gimple_stmt_iterator gsi, copy_gsi, seq_gsi; |
e21aff8a | 1326 | basic_block copy_basic_block; |
726a989a | 1327 | tree decl; |
e21aff8a SB |
1328 | |
1329 | /* create_basic_block() will append every new block to | |
1330 | basic_block_info automatically. */ | |
cceb1885 GDR |
1331 | copy_basic_block = create_basic_block (NULL, (void *) 0, |
1332 | (basic_block) bb->prev_bb->aux); | |
e21aff8a | 1333 | copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE; |
45a80bb9 | 1334 | |
726a989a RB |
1335 | /* We are going to rebuild frequencies from scratch. These values |
1336 | have just small importance to drive canonicalize_loop_headers. */ | |
45a80bb9 | 1337 | copy_basic_block->frequency = ((gcov_type)bb->frequency |
726a989a RB |
1338 | * frequency_scale / REG_BR_PROB_BASE); |
1339 | ||
45a80bb9 JH |
1340 | if (copy_basic_block->frequency > BB_FREQ_MAX) |
1341 | copy_basic_block->frequency = BB_FREQ_MAX; | |
e21aff8a | 1342 | |
726a989a RB |
1343 | copy_gsi = gsi_start_bb (copy_basic_block); |
1344 | ||
1345 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
e21aff8a | 1346 | { |
726a989a RB |
1347 | gimple stmt = gsi_stmt (gsi); |
1348 | gimple orig_stmt = stmt; | |
e21aff8a | 1349 | |
416c991f | 1350 | id->regimplify = false; |
726a989a RB |
1351 | stmt = remap_gimple_stmt (stmt, id); |
1352 | if (gimple_nop_p (stmt)) | |
1353 | continue; | |
1354 | ||
1355 | gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt); | |
c2a4718a | 1356 | seq_gsi = copy_gsi; |
726a989a RB |
1357 | |
1358 | /* With return slot optimization we can end up with | |
1359 | non-gimple (foo *)&this->m, fix that here. */ | |
4c29307d JJ |
1360 | if (is_gimple_assign (stmt) |
1361 | && gimple_assign_rhs_code (stmt) == NOP_EXPR | |
1362 | && !is_gimple_val (gimple_assign_rhs1 (stmt))) | |
e21aff8a | 1363 | { |
726a989a | 1364 | tree new_rhs; |
c2a4718a | 1365 | new_rhs = force_gimple_operand_gsi (&seq_gsi, |
726a989a RB |
1366 | gimple_assign_rhs1 (stmt), |
1367 | true, NULL, true, GSI_SAME_STMT); | |
1368 | gimple_assign_set_rhs1 (stmt, new_rhs); | |
c2a4718a | 1369 | id->regimplify = false; |
726a989a | 1370 | } |
2b65dae5 | 1371 | |
c2a4718a JJ |
1372 | gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT); |
1373 | ||
1374 | if (id->regimplify) | |
1375 | gimple_regimplify_operands (stmt, &seq_gsi); | |
1376 | ||
1377 | /* If copy_basic_block was empty at the start of this iteration, | |
1378 | call gsi_start_bb again to get at the newly added statements. */ | |
1379 | if (gsi_end_p (copy_gsi)) | |
1380 | copy_gsi = gsi_start_bb (copy_basic_block); | |
1381 | else | |
1382 | gsi_next (&copy_gsi); | |
110cfe1c | 1383 | |
726a989a RB |
1384 | /* Process the new statement.  The call to gimple_regimplify_operands | |
1385 | possibly turned the statement into multiple statements; we | |
1386 | need to process all of them. */ | |
c2a4718a | 1387 | do |
726a989a | 1388 | { |
c2a4718a | 1389 | stmt = gsi_stmt (copy_gsi); |
726a989a RB |
1390 | if (is_gimple_call (stmt) |
1391 | && gimple_call_va_arg_pack_p (stmt) | |
1392 | && id->gimple_call) | |
1393 | { | |
1394 | /* __builtin_va_arg_pack () should be replaced by | |
1395 | all arguments corresponding to ... in the caller. */ | |
1396 | tree p; | |
1397 | gimple new_call; | |
1398 | VEC(tree, heap) *argarray; | |
1399 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1400 | size_t n; | |
1401 | ||
1402 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p)) | |
1403 | nargs--; | |
1404 | ||
1405 | /* Create the new array of arguments. */ | |
1406 | n = nargs + gimple_call_num_args (stmt); | |
1407 | argarray = VEC_alloc (tree, heap, n); | |
1408 | VEC_safe_grow (tree, heap, argarray, n); | |
1409 | ||
1410 | /* Copy all the arguments before '...' */ | |
1411 | memcpy (VEC_address (tree, argarray), | |
1412 | gimple_call_arg_ptr (stmt, 0), | |
1413 | gimple_call_num_args (stmt) * sizeof (tree)); | |
1414 | ||
1415 | /* Append the arguments passed in '...' */ | |
1416 | memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt), | |
1417 | gimple_call_arg_ptr (id->gimple_call, 0) | |
1418 | + (gimple_call_num_args (id->gimple_call) - nargs), | |
1419 | nargs * sizeof (tree)); | |
1420 | ||
1421 | new_call = gimple_build_call_vec (gimple_call_fn (stmt), | |
1422 | argarray); | |
1423 | ||
1424 | VEC_free (tree, heap, argarray); | |
1425 | ||
1426 | /* Copy all GIMPLE_CALL flags, location and block, except | |
1427 | GF_CALL_VA_ARG_PACK. */ | |
1428 | gimple_call_copy_flags (new_call, stmt); | |
1429 | gimple_call_set_va_arg_pack (new_call, false); | |
1430 | gimple_set_location (new_call, gimple_location (stmt)); | |
1431 | gimple_set_block (new_call, gimple_block (stmt)); | |
1432 | gimple_call_set_lhs (new_call, gimple_call_lhs (stmt)); | |
1433 | ||
1434 | gsi_replace (&copy_gsi, new_call, false); | |
9cfa22be | 1435 | gimple_set_bb (stmt, NULL); |
726a989a RB |
1436 | stmt = new_call; |
1437 | } | |
1438 | else if (is_gimple_call (stmt) | |
1439 | && id->gimple_call | |
1440 | && (decl = gimple_call_fndecl (stmt)) | |
1441 | && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL | |
1442 | && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN) | |
e0704a46 | 1443 | { |
726a989a RB |
1444 | /* __builtin_va_arg_pack_len () should be replaced by |
1445 | the number of anonymous arguments. */ | |
1446 | size_t nargs = gimple_call_num_args (id->gimple_call); | |
1447 | tree count, p; | |
1448 | gimple new_stmt; | |
1449 | ||
1450 | for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p)) | |
1451 | nargs--; | |
1452 | ||
1453 | count = build_int_cst (integer_type_node, nargs); | |
1454 | new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count); | |
1455 | gsi_replace (&copy_gsi, new_stmt, false); | |
1456 | stmt = new_stmt; | |
1457 | } | |
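/* A sketch of the effect of the two replacements above, using a
   hypothetical wrapper (the names are illustrative only):

     static inline __attribute__ ((always_inline)) int
     my_err (int code, const char *fmt, ...)
     {
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }

   When my_err (3, "%d: %s", n, msg) is inlined, the inner call is
   rebuilt with the caller's anonymous arguments appended, giving
   fprintf (stderr, fmt, n, msg), while __builtin_va_arg_pack_len ()
   would be replaced by the constant 2.  */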
b8a00a4d | 1458 | |
726a989a RB |
1459 | /* Statements produced by inlining can be unfolded, especially | |
1460 | when we have constant propagated some operands.  We can't fold | |
1461 | them right now for two reasons: | |
1462 | 1) folding requires SSA_NAME_DEF_STMTs to be correct | |
1463 | 2) we can't change function calls to builtins. | |
1464 | So we just mark the statement for later folding.  We mark | |
1465 | all new statements, instead of just the statements that changed | |
1466 | by some nontrivial substitution, so that even statements made | |
1467 | foldable indirectly are updated.  If this turns out to be | |
1468 | expensive, copy_body can be told to watch for nontrivial | |
1469 | changes. */ | |
1470 | if (id->statements_to_fold) | |
1471 | pointer_set_insert (id->statements_to_fold, stmt); | |
1472 | ||
1473 | /* We're duplicating a CALL_EXPR. Find any corresponding | |
1474 | callgraph edges and update or duplicate them. */ | |
1475 | if (is_gimple_call (stmt)) | |
1476 | { | |
1477 | struct cgraph_node *node; | |
1478 | struct cgraph_edge *edge; | |
f618d33e | 1479 | int flags; |
6ef5231b | 1480 | |
726a989a | 1481 | switch (id->transform_call_graph_edges) |
e0704a46 | 1482 | { |
726a989a RB |
1483 | case CB_CGE_DUPLICATE: |
1484 | edge = cgraph_edge (id->src_node, orig_stmt); | |
1485 | if (edge) | |
1486 | cgraph_clone_edge (edge, id->dst_node, stmt, | |
3e293154 MJ |
1487 | REG_BR_PROB_BASE, 1, |
1488 | edge->frequency, true); | |
726a989a RB |
1489 | break; |
1490 | ||
1491 | case CB_CGE_MOVE_CLONES: | |
1492 | for (node = id->dst_node->next_clone; | |
1493 | node; | |
1494 | node = node->next_clone) | |
1495 | { | |
1496 | edge = cgraph_edge (node, orig_stmt); | |
3e293154 MJ |
1497 | if (edge) |
1498 | cgraph_set_call_stmt (edge, stmt); | |
726a989a RB |
1499 | } |
1500 | /* FALLTHRU */ | |
110cfe1c | 1501 | |
726a989a RB |
1502 | case CB_CGE_MOVE: |
1503 | edge = cgraph_edge (id->dst_node, orig_stmt); | |
1504 | if (edge) | |
1505 | cgraph_set_call_stmt (edge, stmt); | |
1506 | break; | |
110cfe1c | 1507 | |
726a989a RB |
1508 | default: |
1509 | gcc_unreachable (); | |
110cfe1c | 1510 | } |
f618d33e MJ |
1511 | |
1512 | flags = gimple_call_flags (stmt); | |
1513 | ||
1514 | if (flags & ECF_MAY_BE_ALLOCA) | |
1515 | cfun->calls_alloca = true; | |
1516 | if (flags & ECF_RETURNS_TWICE) | |
1517 | cfun->calls_setjmp = true; | |
726a989a | 1518 | } |
e21aff8a | 1519 | |
726a989a RB |
1520 | /* If you think we can abort here, you are wrong. |
1521 | There is no region 0 in gimple. */ | |
1522 | gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0); | |
1523 | ||
1524 | if (stmt_could_throw_p (stmt) | |
1525 | /* When we are cloning for inlining, we are supposed to | |
1526 | construct a clone that calls precisely the same functions | |
1527 | as the original.  However, IPA optimizers may have proved | |
1528 | earlier that some function calls are non-trapping, which | |
1529 | can render some basic blocks dead and hence | |
1530 | unreachable. | |
1531 | | |
1532 | We can't update SSA with unreachable blocks in the CFG, so | |
1533 | we prevent this scenario by preserving even the "dead" EH | |
1534 | edges until the point where they are later removed by the | |
1535 | fixup_cfg pass. */ | |
1536 | || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES | |
1537 | && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0)) | |
1538 | { | |
1539 | int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt); | |
1540 | ||
1541 | /* Add an entry for the copied tree in the EH hashtable. | |
1542 | When cloning or versioning, use the hashtable in | |
1543 | cfun, and just copy the EH number. When inlining, use the | |
1544 | hashtable in the caller, and adjust the region number. */ | |
1545 | if (region > 0) | |
1546 | add_stmt_to_eh_region (stmt, region + id->eh_region_offset); | |
1547 | ||
1548 | /* If this tree doesn't have a region associated with it, | |
1549 | and there is a "current region," | |
1550 | then associate this tree with the current region | |
1551 | and add edges associated with this region. */ | |
1552 | if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0 | |
1553 | && id->eh_region > 0 | |
1554 | && stmt_could_throw_p (stmt)) | |
1555 | add_stmt_to_eh_region (stmt, id->eh_region); | |
e21aff8a | 1556 | } |
726a989a RB |
1557 | |
1558 | if (gimple_in_ssa_p (cfun)) | |
1559 | { | |
1560 | ssa_op_iter i; | |
1561 | tree def; | |
1562 | ||
1563 | find_new_referenced_vars (gsi_stmt (copy_gsi)); | |
1564 | FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF) | |
1565 | if (TREE_CODE (def) == SSA_NAME) | |
1566 | SSA_NAME_DEF_STMT (def) = stmt; | |
1567 | } | |
1568 | ||
1569 | gsi_next (&copy_gsi); | |
e21aff8a | 1570 | } |
c2a4718a | 1571 | while (!gsi_end_p (copy_gsi)); |
726a989a RB |
1572 | |
1573 | copy_gsi = gsi_last_bb (copy_basic_block); | |
e21aff8a | 1574 | } |
726a989a | 1575 | |
e21aff8a SB |
1576 | return copy_basic_block; |
1577 | } | |
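/* Note on usage (a sketch of the call protocol, inferred from the
   code above): copy_cfg_body invokes copy_bb once per source block,
   after setting bb->prev_bb->aux to the copy of the previous block,
   so create_basic_block chains the new blocks in the original
   order.  */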
1578 | ||
110cfe1c JH |
1579 | /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA | |
1580 | form is quite easy, since the dominator relationship for the old basic blocks | |
1581 | does not change. | |
1582 | | |
1583 | There is, however, an exception: inlining might change the dominator relation | |
1584 | across EH edges from basic blocks within the inlined function leading | |
5305a4cb | 1585 | to landing pads in the function we inline into. | |
110cfe1c | 1586 | | |
e9705dc5 AO |
1587 | The function fills in PHI_RESULTs of such PHI nodes if they refer | |
1588 | to gimple regs.  Otherwise, the function marks the PHI_RESULT of such | |
1589 | PHI nodes for renaming.  For non-gimple regs, renaming is safe: the | |
1590 | EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be | |
1591 | set, and this means that there will be no overlapping live ranges | |
110cfe1c JH |
1592 | for the underlying symbol. | |
1593 | | |
1594 | This might change in the future if we allow redirecting of EH edges, and | |
1595 | we might then want to change the way we build the CFG pre-inlining to | |
1596 | include all the possible edges. */ | |
1597 | static void | |
e9705dc5 AO |
1598 | update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb, |
1599 | bool can_throw, bool nonlocal_goto) | |
110cfe1c JH |
1600 | { |
1601 | edge e; | |
1602 | edge_iterator ei; | |
1603 | ||
1604 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1605 | if (!e->dest->aux | |
1606 | || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK) | |
1607 | { | |
726a989a RB |
1608 | gimple phi; |
1609 | gimple_stmt_iterator si; | |
110cfe1c | 1610 | |
e9705dc5 | 1611 | gcc_assert (e->flags & EDGE_ABNORMAL); |
726a989a | 1612 | |
e9705dc5 AO |
1613 | if (!nonlocal_goto) |
1614 | gcc_assert (e->flags & EDGE_EH); | |
726a989a | 1615 | |
e9705dc5 AO |
1616 | if (!can_throw) |
1617 | gcc_assert (!(e->flags & EDGE_EH)); | |
726a989a RB |
1618 | |
1619 | for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si)) | |
110cfe1c | 1620 | { |
e9705dc5 AO |
1621 | edge re; |
1622 | ||
726a989a RB |
1623 | phi = gsi_stmt (si); |
1624 | ||
e9705dc5 AO |
1625 | /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */ |
1626 | gcc_assert (!e->dest->aux); | |
1627 | ||
726a989a | 1628 | gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi))); |
e9705dc5 AO |
1629 | |
1630 | if (!is_gimple_reg (PHI_RESULT (phi))) | |
1631 | { | |
726a989a | 1632 | mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi))); |
e9705dc5 AO |
1633 | continue; |
1634 | } | |
1635 | ||
1636 | re = find_edge (ret_bb, e->dest); | |
1432b19f | 1637 | gcc_assert (re); |
e9705dc5 AO |
1638 | gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL)) |
1639 | == (e->flags & (EDGE_EH | EDGE_ABNORMAL))); | |
1640 | ||
1641 | SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), | |
1642 | USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re))); | |
110cfe1c JH |
1643 | } |
1644 | } | |
1645 | } | |
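/* For example (a hypothetical sketch): if the caller has a landing
   pad L with a PHI x_5 = PHI <x_1 (R), ...>, where R is ret_bb, and a
   copied block B gains a new EH edge to L, the loop above reuses the
   argument x_1 from the existing R->L edge for the new B->L edge.
   This is safe because the PHI is abnormal, so there are no
   overlapping live ranges for the underlying symbol of x.  */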
1646 | ||
726a989a | 1647 | |
128a79fb KH |
1648 | /* Copy edges from BB into its copy constructed earlier, scale profile | |
1649 | accordingly.  Assume the aux | |
1650 | pointers point to the copies of each BB. */ | |
726a989a | 1651 | |
e21aff8a | 1652 | static void |
0178d644 | 1653 | copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb) |
e21aff8a | 1654 | { |
cceb1885 | 1655 | basic_block new_bb = (basic_block) bb->aux; |
e21aff8a SB |
1656 | edge_iterator ei; |
1657 | edge old_edge; | |
726a989a | 1658 | gimple_stmt_iterator si; |
e21aff8a SB |
1659 | int flags; |
1660 | ||
1661 | /* Use the indices from the original blocks to create edges for the | |
1662 | new ones. */ | |
1663 | FOR_EACH_EDGE (old_edge, ei, bb->succs) | |
e0704a46 JH |
1664 | if (!(old_edge->flags & EDGE_EH)) |
1665 | { | |
82d6e6fc | 1666 | edge new_edge; |
e21aff8a | 1667 | |
e0704a46 | 1668 | flags = old_edge->flags; |
e21aff8a | 1669 | |
e0704a46 JH |
1670 | /* Return edges do get a FALLTHRU flag when they get inlined. */ | |
1671 | if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags | |
1672 | && old_edge->dest->aux != EXIT_BLOCK_PTR) | |
1673 | flags |= EDGE_FALLTHRU; | |
82d6e6fc KG |
1674 | new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags); |
1675 | new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE; | |
1676 | new_edge->probability = old_edge->probability; | |
e0704a46 | 1677 | } |
e21aff8a SB |
1678 | |
1679 | if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK) | |
1680 | return; | |
1681 | ||
726a989a | 1682 | for (si = gsi_start_bb (new_bb); !gsi_end_p (si);) |
e21aff8a | 1683 | { |
726a989a | 1684 | gimple copy_stmt; |
e9705dc5 | 1685 | bool can_throw, nonlocal_goto; |
e21aff8a | 1686 | |
726a989a | 1687 | copy_stmt = gsi_stmt (si); |
e21aff8a | 1688 | update_stmt (copy_stmt); |
110cfe1c JH |
1689 | if (gimple_in_ssa_p (cfun)) |
1690 | mark_symbols_for_renaming (copy_stmt); | |
726a989a | 1691 | |
e21aff8a | 1692 | /* Do this before the possible split_block. */ |
726a989a | 1693 | gsi_next (&si); |
e21aff8a SB |
1694 | |
1695 | /* If this tree could throw an exception, there are two | |
1696 | cases where we need to add abnormal edge(s): the | |
1697 | tree wasn't in a region and there is a "current | |
1698 | region" in the caller; or the original tree had | |
1699 | EH edges. In both cases split the block after the tree, | |
1700 | and add abnormal edge(s) as needed; we need both | |
1701 | those from the callee and the caller. | |
1702 | We check whether the copy can throw, because constant | |
1703 | propagation can change an INDIRECT_REF which throws | |
1704 | into a COMPONENT_REF which doesn't. If the copy | |
1705 | can throw, the original could also throw. */ | |
726a989a RB |
1706 | can_throw = stmt_can_throw_internal (copy_stmt); |
1707 | nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt); | |
e9705dc5 AO |
1708 | |
1709 | if (can_throw || nonlocal_goto) | |
e21aff8a | 1710 | { |
726a989a | 1711 | if (!gsi_end_p (si)) |
e21aff8a SB |
1712 | /* Note that bb's predecessor edges aren't necessarily |
1713 | right at this point; split_block doesn't care. */ | |
1714 | { | |
1715 | edge e = split_block (new_bb, copy_stmt); | |
110cfe1c | 1716 | |
e21aff8a | 1717 | new_bb = e->dest; |
110cfe1c | 1718 | new_bb->aux = e->src->aux; |
726a989a | 1719 | si = gsi_start_bb (new_bb); |
e21aff8a | 1720 | } |
e9705dc5 | 1721 | } |
e21aff8a | 1722 | |
e9705dc5 AO |
1723 | if (can_throw) |
1724 | make_eh_edges (copy_stmt); | |
110cfe1c | 1725 | |
e9705dc5 | 1726 | if (nonlocal_goto) |
726a989a | 1727 | make_abnormal_goto_edges (gimple_bb (copy_stmt), true); |
e9705dc5 AO |
1728 | |
1729 | if ((can_throw || nonlocal_goto) | |
1730 | && gimple_in_ssa_p (cfun)) | |
726a989a | 1731 | update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb, |
e9705dc5 | 1732 | can_throw, nonlocal_goto); |
110cfe1c JH |
1733 | } |
1734 | } | |
1735 | ||
1736 | /* Copy the PHIs.  All blocks and edges are copied, some blocks | |
1737 | were possibly split and new outgoing EH edges inserted. | |
1738 | BB points to the block of the original function and AUX pointers link | |
1739 | the original and newly copied blocks. */ | |
1740 | ||
1741 | static void | |
1742 | copy_phis_for_bb (basic_block bb, copy_body_data *id) | |
1743 | { | |
3d9a9f94 | 1744 | basic_block const new_bb = (basic_block) bb->aux; |
110cfe1c | 1745 | edge_iterator ei; |
726a989a RB |
1746 | gimple phi; |
1747 | gimple_stmt_iterator si; | |
110cfe1c | 1748 | |
726a989a | 1749 | for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si)) |
110cfe1c | 1750 | { |
726a989a RB |
1751 | tree res, new_res; |
1752 | gimple new_phi; | |
110cfe1c JH |
1753 | edge new_edge; |
1754 | ||
726a989a RB |
1755 | phi = gsi_stmt (si); |
1756 | res = PHI_RESULT (phi); | |
1757 | new_res = res; | |
110cfe1c JH |
1758 | if (is_gimple_reg (res)) |
1759 | { | |
726a989a | 1760 | walk_tree (&new_res, copy_tree_body_r, id, NULL); |
110cfe1c JH |
1761 | SSA_NAME_DEF_STMT (new_res) |
1762 | = new_phi = create_phi_node (new_res, new_bb); | |
1763 | FOR_EACH_EDGE (new_edge, ei, new_bb->preds) | |
1764 | { | |
726a989a RB |
1765 | edge const old_edge |
1766 | = find_edge ((basic_block) new_edge->src->aux, bb); | |
110cfe1c JH |
1767 | tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge); |
1768 | tree new_arg = arg; | |
726a989a RB |
1769 | tree block = id->block; |
1770 | id->block = NULL_TREE; | |
1771 | walk_tree (&new_arg, copy_tree_body_r, id, NULL); | |
1772 | id->block = block; | |
110cfe1c | 1773 | gcc_assert (new_arg); |
36b6e793 JJ |
1774 | /* With return slot optimization we can end up with |
1775 | non-gimple (foo *)&this->m, fix that here. */ | |
1776 | if (TREE_CODE (new_arg) != SSA_NAME | |
1777 | && TREE_CODE (new_arg) != FUNCTION_DECL | |
1778 | && !is_gimple_val (new_arg)) | |
1779 | { | |
726a989a RB |
1780 | gimple_seq stmts = NULL; |
1781 | new_arg = force_gimple_operand (new_arg, &stmts, true, NULL); | |
1782 | gsi_insert_seq_on_edge_immediate (new_edge, stmts); | |
36b6e793 | 1783 | } |
110cfe1c JH |
1784 | add_phi_arg (new_phi, new_arg, new_edge); |
1785 | } | |
e21aff8a SB |
1786 | } |
1787 | } | |
1788 | } | |
1789 | ||
726a989a | 1790 | |
e21aff8a | 1791 | /* Wrapper for remap_decl so it can be used as a callback. */ |
726a989a | 1792 | |
e21aff8a SB |
1793 | static tree |
1794 | remap_decl_1 (tree decl, void *data) | |
1795 | { | |
1b369fae | 1796 | return remap_decl (decl, (copy_body_data *) data); |
e21aff8a SB |
1797 | } |
1798 | ||
110cfe1c JH |
1799 | /* Build the struct function and associated data structures for the new | |
1800 | clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original. */ | |
1801 | ||
1802 | static void | |
1803 | initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count, | |
1804 | int frequency) | |
1805 | { | |
110cfe1c | 1806 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
0178d644 | 1807 | gcov_type count_scale, frequency_scale; |
110cfe1c JH |
1808 | |
1809 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) | |
1810 | count_scale = (REG_BR_PROB_BASE * count | |
1811 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); | |
1812 | else | |
1813 | count_scale = 1; | |
1814 | ||
1815 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency) | |
1816 | frequency_scale = (REG_BR_PROB_BASE * frequency | |
1817 | / | |
1818 | ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency); | |
1819 | else | |
1820 | frequency_scale = count_scale; | |
1821 | ||
1822 | /* Register specific tree functions. */ | |
726a989a | 1823 | gimple_register_cfg_hooks (); |
39ecc018 JH |
1824 | |
1825 | /* Get clean struct function. */ | |
1826 | push_struct_function (new_fndecl); | |
1827 | ||
1828 | /* We will rebuild these, so just sanity check that they are empty. */ | |
1829 | gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL); | |
1830 | gcc_assert (cfun->local_decls == NULL); | |
1831 | gcc_assert (cfun->cfg == NULL); | |
1832 | gcc_assert (cfun->decl == new_fndecl); | |
1833 | ||
39ecc018 JH |
1834 | /* Copy items we preserve during cloning. */ | |
1835 | cfun->static_chain_decl = src_cfun->static_chain_decl; | |
1836 | cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area; | |
1837 | cfun->function_end_locus = src_cfun->function_end_locus; | |
1838 | cfun->curr_properties = src_cfun->curr_properties; | |
1839 | cfun->last_verified = src_cfun->last_verified; | |
1840 | if (src_cfun->ipa_transforms_to_apply) | |
1841 | cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap, | |
1842 | src_cfun->ipa_transforms_to_apply); | |
1843 | cfun->va_list_gpr_size = src_cfun->va_list_gpr_size; | |
1844 | cfun->va_list_fpr_size = src_cfun->va_list_fpr_size; | |
1845 | cfun->function_frequency = src_cfun->function_frequency; | |
1846 | cfun->has_nonlocal_label = src_cfun->has_nonlocal_label; | |
1847 | cfun->stdarg = src_cfun->stdarg; | |
1848 | cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p; | |
1849 | cfun->after_inlining = src_cfun->after_inlining; | |
1850 | cfun->returns_struct = src_cfun->returns_struct; | |
1851 | cfun->returns_pcc_struct = src_cfun->returns_pcc_struct; | |
1852 | cfun->after_tree_profile = src_cfun->after_tree_profile; | |
1853 | ||
110cfe1c JH |
1854 | init_empty_tree_cfg (); |
1855 | ||
1856 | ENTRY_BLOCK_PTR->count = | |
1857 | (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
1858 | REG_BR_PROB_BASE); | |
1859 | ENTRY_BLOCK_PTR->frequency = | |
1860 | (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency * | |
1861 | frequency_scale / REG_BR_PROB_BASE); | |
1862 | EXIT_BLOCK_PTR->count = | |
1863 | (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale / | |
1864 | REG_BR_PROB_BASE); | |
1865 | EXIT_BLOCK_PTR->frequency = | |
1866 | (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency * | |
1867 | frequency_scale / REG_BR_PROB_BASE); | |
1868 | if (src_cfun->eh) | |
1869 | init_eh_for_function (); | |
1870 | ||
1871 | if (src_cfun->gimple_df) | |
1872 | { | |
5db9ba0c | 1873 | init_tree_ssa (cfun); |
110cfe1c JH |
1874 | cfun->gimple_df->in_ssa_p = true; |
1875 | init_ssa_operands (); | |
1876 | } | |
1877 | pop_cfun (); | |
1878 | } | |
1879 | ||
e21aff8a SB |
1880 | /* Make a copy of the body of FN so that it can be inserted inline in |
1881 | another function. Walks FN via CFG, returns new fndecl. */ | |
1882 | ||
1883 | static tree | |
1b369fae | 1884 | copy_cfg_body (copy_body_data * id, gcov_type count, int frequency, |
e21aff8a SB |
1885 | basic_block entry_block_map, basic_block exit_block_map) |
1886 | { | |
1b369fae | 1887 | tree callee_fndecl = id->src_fn; |
e21aff8a | 1888 | /* Original cfun for the callee, doesn't change. */ |
1b369fae | 1889 | struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
110cfe1c | 1890 | struct function *cfun_to_copy; |
e21aff8a SB |
1891 | basic_block bb; |
1892 | tree new_fndecl = NULL; | |
0178d644 | 1893 | gcov_type count_scale, frequency_scale; |
110cfe1c | 1894 | int last; |
e21aff8a | 1895 | |
1b369fae | 1896 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count) |
e21aff8a | 1897 | count_scale = (REG_BR_PROB_BASE * count |
1b369fae | 1898 | / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count); |
e21aff8a SB |
1899 | else |
1900 | count_scale = 1; | |
1901 | ||
1b369fae | 1902 | if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency) |
e21aff8a SB |
1903 | frequency_scale = (REG_BR_PROB_BASE * frequency |
1904 | / | |
1b369fae | 1905 | ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency); |
e21aff8a SB |
1906 | else |
1907 | frequency_scale = count_scale; | |
1908 | ||
1909 | /* Register specific tree functions. */ | |
726a989a | 1910 | gimple_register_cfg_hooks (); |
e21aff8a SB |
1911 | |
1912 | /* Must have a CFG here at this point. */ | |
1913 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION | |
1914 | (DECL_STRUCT_FUNCTION (callee_fndecl))); | |
1915 | ||
110cfe1c JH |
1916 | cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl); |
1917 | ||
e21aff8a SB |
1918 | ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map; |
1919 | EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map; | |
110cfe1c JH |
1920 | entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); |
1921 | exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy); | |
e21aff8a | 1922 | |
e21aff8a SB |
1923 | /* Duplicate any exception-handling regions. */ |
1924 | if (cfun->eh) | |
1925 | { | |
1b369fae | 1926 | id->eh_region_offset |
fad41cd7 RH |
1927 | = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id, |
1928 | 0, id->eh_region); | |
e21aff8a | 1929 | } |
726a989a | 1930 | |
e21aff8a SB |
1931 | /* Use aux pointers to map the original blocks to their copies. */ | |
1932 | FOR_EACH_BB_FN (bb, cfun_to_copy) | |
110cfe1c | 1933 | { |
82d6e6fc KG |
1934 | basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale); |
1935 | bb->aux = new_bb; | |
1936 | new_bb->aux = bb; | |
110cfe1c JH |
1937 | } |
1938 | ||
7c57be85 | 1939 | last = last_basic_block; |
726a989a | 1940 | |
e21aff8a SB |
1941 | /* Now that we've duplicated the blocks, duplicate their edges. */ |
1942 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
e9705dc5 | 1943 | copy_edges_for_bb (bb, count_scale, exit_block_map); |
726a989a | 1944 | |
110cfe1c JH |
1945 | if (gimple_in_ssa_p (cfun)) |
1946 | FOR_ALL_BB_FN (bb, cfun_to_copy) | |
1947 | copy_phis_for_bb (bb, id); | |
726a989a | 1948 | |
e21aff8a | 1949 | FOR_ALL_BB_FN (bb, cfun_to_copy) |
110cfe1c JH |
1950 | { |
1951 | ((basic_block)bb->aux)->aux = NULL; | |
1952 | bb->aux = NULL; | |
1953 | } | |
726a989a | 1954 | |
110cfe1c JH |
1955 | /* Zero out the AUX fields of blocks newly created during EH edge | |
1956 | insertion. */ | |
7c57be85 | 1957 | for (; last < last_basic_block; last++) |
110cfe1c JH |
1958 | BASIC_BLOCK (last)->aux = NULL; |
1959 | entry_block_map->aux = NULL; | |
1960 | exit_block_map->aux = NULL; | |
e21aff8a SB |
1961 | |
1962 | return new_fndecl; | |
1963 | } | |
1964 | ||
e21aff8a | 1965 | static tree |
1b369fae | 1966 | copy_body (copy_body_data *id, gcov_type count, int frequency, |
e21aff8a SB |
1967 | basic_block entry_block_map, basic_block exit_block_map) |
1968 | { | |
1b369fae | 1969 | tree fndecl = id->src_fn; |
e21aff8a SB |
1970 | tree body; |
1971 | ||
1972 | /* If this body has a CFG, walk CFG and copy. */ | |
1973 | gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl))); | |
1974 | body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map); | |
1975 | ||
1976 | return body; | |
1977 | } | |
1978 | ||
04482133 AO |
1979 | /* Return true if VALUE is an ADDR_EXPR of an automatic variable |
1980 | defined in function FN, or of a data member thereof. */ | |
1981 | ||
1982 | static bool | |
1983 | self_inlining_addr_expr (tree value, tree fn) | |
1984 | { | |
1985 | tree var; | |
1986 | ||
1987 | if (TREE_CODE (value) != ADDR_EXPR) | |
1988 | return false; | |
1989 | ||
1990 | var = get_base_address (TREE_OPERAND (value, 0)); | |
e21aff8a | 1991 | |
50886bf1 | 1992 | return var && auto_var_in_fn_p (var, fn); |
04482133 AO |
1993 | } |
1994 | ||
6de9cd9a | 1995 | static void |
0f1961a2 JH |
1996 | insert_init_stmt (basic_block bb, gimple init_stmt) |
1997 | { | |
0f1961a2 JH |
1998 | /* If VAR represents a zero-sized variable, it's possible that the |
1999 | assignment statement may result in no gimple statements. */ | |
2000 | if (init_stmt) | |
c2a4718a JJ |
2001 | { |
2002 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
0f1961a2 | 2003 | |
bfb0b886 RG |
2004 | /* We can end up with init statements that store to a non-register |
2005 | from a rhs with a conversion. Handle that here by forcing the | |
2006 | rhs into a temporary. gimple_regimplify_operands is not | |
2007 | prepared to do this for us. */ | |
2008 | if (!is_gimple_reg (gimple_assign_lhs (init_stmt)) | |
2009 | && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt))) | |
2010 | && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS) | |
2011 | { | |
2012 | tree rhs = build1 (gimple_assign_rhs_code (init_stmt), | |
2013 | gimple_expr_type (init_stmt), | |
2014 | gimple_assign_rhs1 (init_stmt)); | |
2015 | rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false, | |
2016 | GSI_NEW_STMT); | |
2017 | gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs)); | |
2018 | gimple_assign_set_rhs1 (init_stmt, rhs); | |
2019 | } | |
c2a4718a JJ |
2020 | gsi_insert_after (&si, init_stmt, GSI_NEW_STMT); |
2021 | gimple_regimplify_operands (init_stmt, &si); | |
2022 | mark_symbols_for_renaming (init_stmt); | |
2023 | } | |
0f1961a2 JH |
2024 | } |
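/* For illustration (a hypothetical sketch of the case handled above):
   an init statement such as
     s.f = (float) i_2;
   stores to a non-register through a unary conversion, so the rhs is
   first forced into a fresh temporary,
     tmp_3 = (float) i_2;
     s.f = tmp_3;
   which keeps the statement in valid gimple form.  */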
2025 | ||
2026 | /* Initialize parameter P with VALUE. If needed, produce init statement | |
2027 | at the end of BB. When BB is NULL, we return init statement to be | |
2028 | output later. */ | |
2029 | static gimple | |
1b369fae | 2030 | setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn, |
e21aff8a | 2031 | basic_block bb, tree *vars) |
6de9cd9a | 2032 | { |
0f1961a2 | 2033 | gimple init_stmt = NULL; |
6de9cd9a | 2034 | tree var; |
f4088621 | 2035 | tree rhs = value; |
110cfe1c JH |
2036 | tree def = (gimple_in_ssa_p (cfun) |
2037 | ? gimple_default_def (id->src_cfun, p) : NULL); | |
6de9cd9a | 2038 | |
f4088621 RG |
2039 | if (value |
2040 | && value != error_mark_node | |
2041 | && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))) | |
c54e3854 RG |
2042 | { |
2043 | if (fold_convertible_p (TREE_TYPE (p), value)) | |
2044 | rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value); | |
2045 | else | |
2046 | /* ??? For valid (GIMPLE) programs we should not end up here. | |
2047 | Still if something has gone wrong and we end up with truly | |
2048 | mismatched types here, fall back to using a VIEW_CONVERT_EXPR | |
2049 | to not leak invalid GIMPLE to the following passes. */ | |
2050 | rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value); | |
2051 | } | |
f4088621 | 2052 | |
110cfe1c JH |
2053 | /* If the parameter is never assigned to, has no SSA_NAMEs created, |
2054 | we may not need to create a new variable here at all. Instead, we may | |
2055 | be able to just use the argument value. */ | |
6de9cd9a DN |
2056 | if (TREE_READONLY (p) |
2057 | && !TREE_ADDRESSABLE (p) | |
110cfe1c JH |
2058 | && value && !TREE_SIDE_EFFECTS (value) |
2059 | && !def) | |
6de9cd9a | 2060 | { |
84936f6f RH |
2061 | /* We may produce non-gimple trees by adding NOPs or introduce | |
2062 | invalid sharing when the operand is not really constant. | |
2063 | It is not a big deal to prohibit constant propagation here, as | |
2064 | we will constant propagate in the DOM1 pass anyway. */ | |
2065 | if (is_gimple_min_invariant (value) | |
f4088621 RG |
2066 | && useless_type_conversion_p (TREE_TYPE (p), |
2067 | TREE_TYPE (value)) | |
04482133 AO |
2068 | /* We have to be very careful about ADDR_EXPR. Make sure |
2069 | the base variable isn't a local variable of the inlined | |
2070 | function, e.g., when doing recursive inlining, direct or | |
2071 | mutually-recursive or whatever, which is why we don't | |
2072 | just test whether fn == current_function_decl. */ | |
2073 | && ! self_inlining_addr_expr (value, fn)) | |
6de9cd9a | 2074 | { |
6de9cd9a | 2075 | insert_decl_map (id, p, value); |
0f1961a2 | 2076 | return NULL; |
6de9cd9a DN |
2077 | } |
2078 | } | |
2079 | ||
5377d5ba RK |
2080 | /* Make an equivalent VAR_DECL. Note that we must NOT remap the type |
2081 | here since the type of this decl must be visible to the calling | |
8c27b7d4 | 2082 | function. */ |
1b369fae | 2083 | var = copy_decl_to_var (p, id); |
110cfe1c JH |
2084 | if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL) |
2085 | { | |
2086 | get_var_ann (var); | |
2087 | add_referenced_var (var); | |
2088 | } | |
e21aff8a | 2089 | |
6de9cd9a DN |
2090 | /* Register the VAR_DECL as the equivalent for the PARM_DECL; |
2091 | that way, when the PARM_DECL is encountered, it will be | |
2092 | automatically replaced by the VAR_DECL. */ | |
7c7d3047 | 2093 | insert_decl_map (id, p, var); |
6de9cd9a DN |
2094 | |
2095 | /* Declare this new variable. */ | |
2096 | TREE_CHAIN (var) = *vars; | |
2097 | *vars = var; | |
2098 | ||
2099 | /* Make gimplifier happy about this variable. */ | |
84936f6f | 2100 | DECL_SEEN_IN_BIND_EXPR_P (var) = 1; |
6de9cd9a DN |
2101 | |
2102 | /* Even if P was TREE_READONLY, the new VAR should not be. | |
2103 | In the original code, we would have constructed a | |
2104 | temporary, and then the function body would have never | |
2105 | changed the value of P. However, now, we will be | |
2106 | constructing VAR directly. The constructor body may | |
2107 | change its value multiple times as it is being | |
2108 | constructed. Therefore, it must not be TREE_READONLY; | |
2109 | the back-end assumes that a TREE_READONLY variable is | |
2110 | assigned to only once. */ | |
2111 | if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p))) | |
2112 | TREE_READONLY (var) = 0; | |
2113 | ||
110cfe1c JH |
2114 | /* If there is no setup required and we are in SSA, take the easy route | |
2115 | of replacing all SSA names representing the function parameter by the | |
2116 | SSA name passed to the function. | |
2117 | | |
2118 | We need to construct a map for the variable anyway, as it might be | |
2119 | used in different SSA names when the parameter is set in the function. | |
2120 | | |
8454d27e JH |
2121 | Do the replacement at -O0 for const arguments replaced by a constant. | |
2122 | This is important for builtin_constant_p and other constructs requiring | |
2123 | the constant argument to be visible in the inlined function body. | |
2124 | | |
110cfe1c JH |
2125 | FIXME: This usually kills the last connection between the inlined | |
2126 | function parameter and the actual value in the debug info.  Can we do | |
2127 | better here?  If we just inserted the statement, copy propagation | |
2128 | would kill it anyway, as it always did in older versions of GCC. | |
2129 | | |
2130 | We might want to introduce a notion that a single SSA_NAME might | |
2131 | represent multiple variables for purposes of debugging. */ | |
2132 | if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p) | |
8454d27e JH |
2133 | && (optimize |
2134 | || (TREE_READONLY (p) | |
2135 | && is_gimple_min_invariant (rhs))) | |
110cfe1c | 2136 | && (TREE_CODE (rhs) == SSA_NAME |
9b718f81 JH |
2137 | || is_gimple_min_invariant (rhs)) |
2138 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)) | |
110cfe1c JH |
2139 | { |
2140 | insert_decl_map (id, def, rhs); | |
0f1961a2 | 2141 | return NULL; |
110cfe1c JH |
2142 | } |
2143 | ||
f6f2da7d JH |
2144 | /* If the value of the argument is never used, don't bother initializing | |
2145 | it. */ | |
1cf5abb3 | 2146 | if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p)) |
f6f2da7d JH |
2147 | { |
2148 | gcc_assert (!value || !TREE_SIDE_EFFECTS (value)); | |
0f1961a2 | 2149 | return NULL; |
f6f2da7d JH |
2150 | } |
2151 | ||
6de9cd9a DN |
2152 | /* Initialize this VAR_DECL from the equivalent argument. Convert |
2153 | the argument to the proper type in case it was promoted. */ | |
2154 | if (value) | |
2155 | { | |
6de9cd9a | 2156 | if (rhs == error_mark_node) |
110cfe1c | 2157 | { |
7c7d3047 | 2158 | insert_decl_map (id, p, var); |
0f1961a2 | 2159 | return NULL; |
110cfe1c | 2160 | } |
afe08db5 | 2161 | |
73dab33b | 2162 | STRIP_USELESS_TYPE_CONVERSION (rhs); |
6de9cd9a | 2163 | |
726a989a | 2164 | /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we |
6de9cd9a | 2165 | keep our trees in gimple form. */ |
110cfe1c JH |
2166 | if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p)) |
2167 | { | |
2168 | def = remap_ssa_name (def, id); | |
726a989a | 2169 | init_stmt = gimple_build_assign (def, rhs); |
110cfe1c JH |
2170 | SSA_NAME_IS_DEFAULT_DEF (def) = 0; |
2171 | set_default_def (var, NULL); | |
2172 | } | |
2173 | else | |
726a989a | 2174 | init_stmt = gimple_build_assign (var, rhs); |
6de9cd9a | 2175 | |
0f1961a2 JH |
2176 | if (bb && init_stmt) |
2177 | insert_init_stmt (bb, init_stmt); | |
6de9cd9a | 2178 | } |
0f1961a2 | 2179 | return init_stmt; |
6de9cd9a DN |
2180 | } |
2181 | ||
d4e4baa9 | 2182 | /* Generate code to initialize the parameters of the function at the |
726a989a | 2183 | top of the stack in ID from the GIMPLE_CALL STMT. */ |
d4e4baa9 | 2184 | |
e21aff8a | 2185 | static void |
726a989a | 2186 | initialize_inlined_parameters (copy_body_data *id, gimple stmt, |
e21aff8a | 2187 | tree fn, basic_block bb) |
d4e4baa9 | 2188 | { |
d4e4baa9 | 2189 | tree parms; |
726a989a | 2190 | size_t i; |
d4e4baa9 | 2191 | tree p; |
d436bff8 | 2192 | tree vars = NULL_TREE; |
726a989a | 2193 | tree static_chain = gimple_call_chain (stmt); |
d4e4baa9 AO |
2194 | |
2195 | /* Figure out what the parameters are. */ | |
18c6ada9 | 2196 | parms = DECL_ARGUMENTS (fn); |
d4e4baa9 | 2197 | |
d4e4baa9 AO |
2198 | /* Loop through the parameter declarations, replacing each with an |
2199 | equivalent VAR_DECL, appropriately initialized. */ | |
726a989a RB |
2200 | for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++) |
2201 | { | |
2202 | tree val; | |
2203 | val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL; | |
2204 | setup_one_parameter (id, p, val, fn, bb, &vars); | |
2205 | } | |
4838c5ee | 2206 | |
6de9cd9a DN |
2207 | /* Initialize the static chain. */ |
2208 | p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl; | |
ea99e0be | 2209 | gcc_assert (fn != current_function_decl); |
6de9cd9a DN |
2210 | if (p) |
2211 | { | |
2212 | /* No static chain? Seems like a bug in tree-nested.c. */ | |
1e128c5f | 2213 | gcc_assert (static_chain); |
4838c5ee | 2214 | |
e21aff8a | 2215 | setup_one_parameter (id, p, static_chain, fn, bb, &vars); |
4838c5ee AO |
2216 | } |
2217 | ||
e21aff8a | 2218 | declare_inline_vars (id->block, vars); |
d4e4baa9 AO |
2219 | } |
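/* E.g. (an illustrative sketch, names hypothetical): inlining a call
   g (a + 1, b) of g (int p, int q) runs setup_one_parameter once per
   parameter, typically yielding initializations of the VAR_DECL
   copies of p and q (or direct decl-map entries when the value can be
   forwarded), plus one more assignment for the static chain when g is
   a nested function.  */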
2220 | ||
726a989a | 2221 | |
e21aff8a SB |
2222 | /* Declare a return variable to replace the RESULT_DECL for the |
2223 | function we are calling. An appropriate DECL_STMT is returned. | |
2224 | The USE_STMT is filled to contain a use of the declaration to | |
2225 | indicate the return value of the function. | |
2226 | ||
110cfe1c JH |
2227 | RETURN_SLOT, if non-null, is the place where to store the result.  It | |
2228 | is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null, | |
726a989a | 2229 | was the LHS of the MODIFY_EXPR to which this call is the RHS. |
7740f00d RH |
2230 | |
2231 | The return value is a (possibly null) value that is the result of the | |
2232 | function as seen by the callee. *USE_P is a (possibly null) value that | |
2233 | holds the result as seen by the caller. */ | |
d4e4baa9 | 2234 | |
d436bff8 | 2235 | static tree |
110cfe1c JH |
2236 | declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest, |
2237 | tree *use_p) | |
d4e4baa9 | 2238 | { |
1b369fae RH |
2239 | tree callee = id->src_fn; |
2240 | tree caller = id->dst_fn; | |
7740f00d RH |
2241 | tree result = DECL_RESULT (callee); |
2242 | tree callee_type = TREE_TYPE (result); | |
2243 | tree caller_type = TREE_TYPE (TREE_TYPE (callee)); | |
2244 | tree var, use; | |
d4e4baa9 AO |
2245 | |
2246 | /* We don't need to do anything for functions that don't return | |
2247 | anything. */ | |
7740f00d | 2248 | if (!result || VOID_TYPE_P (callee_type)) |
d4e4baa9 | 2249 | { |
6de9cd9a | 2250 | *use_p = NULL_TREE; |
d4e4baa9 AO |
2251 | return NULL_TREE; |
2252 | } | |
2253 | ||
cc77ae10 | 2254 | /* If there was a return slot, then the return value is the |
7740f00d | 2255 | dereferenced address of that object. */ |
110cfe1c | 2256 | if (return_slot) |
7740f00d | 2257 | { |
110cfe1c | 2258 | /* The front end shouldn't have used both return_slot and |
7740f00d | 2259 | a modify expression. */ |
1e128c5f | 2260 | gcc_assert (!modify_dest); |
cc77ae10 | 2261 | if (DECL_BY_REFERENCE (result)) |
110cfe1c JH |
2262 | { |
2263 | tree return_slot_addr = build_fold_addr_expr (return_slot); | |
2264 | STRIP_USELESS_TYPE_CONVERSION (return_slot_addr); | |
2265 | ||
2266 | /* We are going to construct *&return_slot and we can't do that | |
2267 | for variables believed to be not addressable. | |
2268 | ||
2269 | FIXME: This check can possibly match, because values returned | |
2270 | via the return slot optimization are not believed to have their | |
2271 | address taken by alias analysis. */ | |
2272 | gcc_assert (TREE_CODE (return_slot) != SSA_NAME); | |
2273 | if (gimple_in_ssa_p (cfun)) | |
2274 | { | |
2275 | HOST_WIDE_INT bitsize; | |
2276 | HOST_WIDE_INT bitpos; | |
2277 | tree offset; | |
2278 | enum machine_mode mode; | |
2279 | int unsignedp; | |
2280 | int volatilep; | |
2281 | tree base; | |
2282 | base = get_inner_reference (return_slot, &bitsize, &bitpos, | |
2283 | &offset, | |
2284 | &mode, &unsignedp, &volatilep, | |
2285 | false); | |
2286 | if (TREE_CODE (base) == INDIRECT_REF) | |
2287 | base = TREE_OPERAND (base, 0); | |
2288 | if (TREE_CODE (base) == SSA_NAME) | |
2289 | base = SSA_NAME_VAR (base); | |
2290 | mark_sym_for_renaming (base); | |
2291 | } | |
2292 | var = return_slot_addr; | |
2293 | } | |
cc77ae10 | 2294 | else |
110cfe1c JH |
2295 | { |
2296 | var = return_slot; | |
2297 | gcc_assert (TREE_CODE (var) != SSA_NAME); | |
b5ca517c | 2298 | TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result); |
110cfe1c | 2299 | } |
0890b981 AP |
2300 | if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
2301 | || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
2302 | && !DECL_GIMPLE_REG_P (result) | |
22918034 | 2303 | && DECL_P (var)) |
0890b981 | 2304 | DECL_GIMPLE_REG_P (var) = 0; |
7740f00d RH |
2305 | use = NULL; |
2306 | goto done; | |
2307 | } | |
2308 | ||
2309 | /* All types requiring non-trivial constructors should have been handled. */ | |
1e128c5f | 2310 | gcc_assert (!TREE_ADDRESSABLE (callee_type)); |
7740f00d RH |
2311 | |
2312 | /* Attempt to avoid creating a new temporary variable. */ | |
110cfe1c JH |
2313 | if (modify_dest |
2314 | && TREE_CODE (modify_dest) != SSA_NAME) | |
7740f00d RH |
2315 | { |
2316 | bool use_it = false; | |
2317 | ||
2318 | /* We can't use MODIFY_DEST if there's type promotion involved. */ | |
f4088621 | 2319 | if (!useless_type_conversion_p (callee_type, caller_type)) |
7740f00d RH |
2320 | use_it = false; |
2321 | ||
2322 | /* ??? If we're assigning to a variable sized type, then we must | |
2323 | reuse the destination variable, because we've no good way to | |
2324 | create variable sized temporaries at this point. */ | |
2325 | else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST) | |
2326 | use_it = true; | |
2327 | ||
2328 | /* If the callee cannot possibly modify MODIFY_DEST, then we can | |
2329 | reuse it as the result of the call directly. Don't do this if | |
2330 | it would promote MODIFY_DEST to addressable. */ | |
e2f9fe42 RH |
2331 | else if (TREE_ADDRESSABLE (result)) |
2332 | use_it = false; | |
2333 | else | |
2334 | { | |
2335 | tree base_m = get_base_address (modify_dest); | |
2336 | ||
2337 | /* If the base isn't a decl, then it's a pointer, and we don't | |
2338 | know where that's going to go. */ | |
2339 | if (!DECL_P (base_m)) | |
2340 | use_it = false; | |
2341 | else if (is_global_var (base_m)) | |
2342 | use_it = false; | |
0890b981 AP |
2343 | else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
2344 | || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
2345 | && !DECL_GIMPLE_REG_P (result) | |
2346 | && DECL_GIMPLE_REG_P (base_m)) | |
1d327c16 | 2347 | use_it = false; |
e2f9fe42 RH |
2348 | else if (!TREE_ADDRESSABLE (base_m)) |
2349 | use_it = true; | |
2350 | } | |
7740f00d RH |
2351 | |
2352 | if (use_it) | |
2353 | { | |
2354 | var = modify_dest; | |
2355 | use = NULL; | |
2356 | goto done; | |
2357 | } | |
2358 | } | |
2359 | ||
1e128c5f | 2360 | gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST); |
7740f00d | 2361 | |
c08cd4c1 | 2362 | var = copy_result_decl_to_var (result, id); |
110cfe1c JH |
2363 | if (gimple_in_ssa_p (cfun)) |
2364 | { | |
2365 | get_var_ann (var); | |
2366 | add_referenced_var (var); | |
2367 | } | |
e21aff8a | 2368 | |
7740f00d | 2369 | DECL_SEEN_IN_BIND_EXPR_P (var) = 1; |
cb91fab0 | 2370 | DECL_STRUCT_FUNCTION (caller)->local_decls |
7740f00d | 2371 | = tree_cons (NULL_TREE, var, |
cb91fab0 | 2372 | DECL_STRUCT_FUNCTION (caller)->local_decls); |
7740f00d | 2373 | |
6de9cd9a | 2374 | /* Do not have the rest of GCC warn about this variable as it should |
471854f8 | 2375 | not be visible to the user. */ |
6de9cd9a | 2376 | TREE_NO_WARNING (var) = 1; |
d4e4baa9 | 2377 | |
c08cd4c1 JM |
2378 | declare_inline_vars (id->block, var); |
2379 | ||
7740f00d RH |
2380 | /* Build the use expr. If the return type of the function was |
2381 | promoted, convert it back to the expected type. */ | |
2382 | use = var; | |
f4088621 | 2383 | if (!useless_type_conversion_p (caller_type, TREE_TYPE (var))) |
7740f00d | 2384 | use = fold_convert (caller_type, var); |
73dab33b AP |
2385 | |
2386 | STRIP_USELESS_TYPE_CONVERSION (use); | |
7740f00d | 2387 | |
c08cd4c1 | 2388 | if (DECL_BY_REFERENCE (result)) |
32848948 RG |
2389 | { |
2390 | TREE_ADDRESSABLE (var) = 1; | |
2391 | var = build_fold_addr_expr (var); | |
2392 | } | |
c08cd4c1 | 2393 | |
7740f00d | 2394 | done: |
d4e4baa9 AO |
2395 | /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that |
2396 | way, when the RESULT_DECL is encountered, it will be | |
2397 | automatically replaced by the VAR_DECL. */ | |
5e20bdd7 | 2398 | insert_decl_map (id, result, var); |
d4e4baa9 | 2399 | |
6de9cd9a DN |
2400 | /* Remember this so we can ignore it in remap_decls. */ |
2401 | id->retvar = var; | |
2402 | ||
7740f00d RH |
2403 | *use_p = use; |
2404 | return var; | |
d4e4baa9 AO |
2405 | } |
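/* A sketch of the three outcomes above (illustrative only): for
   "y = f ();" where y is a safe local aggregate, MODIFY_DEST is
   reused and the callee's RESULT_DECL maps directly to y; with the
   return slot optimization the result maps to the slot (dereferenced
   when the result is returned by reference); otherwise a fresh
   VAR_DECL is created and *USE_P receives it, converted back to the
   caller-visible type if the return type was promoted.  */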
2406 | ||
0e9e1e0a | 2407 | /* Returns nonzero if a function can be inlined as a tree. */ |
4838c5ee | 2408 | |
b3c3af2f SB |
2409 | bool |
2410 | tree_inlinable_function_p (tree fn) | |
4838c5ee | 2411 | { |
658344f2 | 2412 | return inlinable_function_p (fn); |
726a989a RB |
2413 | } |
2414 | ||
2415 | static const char *inline_forbidden_reason; | |
2416 | ||
2417 | /* A callback for walk_gimple_seq to handle tree operands. Returns | |
2418 | NULL_TREE if a function can be inlined, otherwise sets the reason | |
2419 | why not and returns a tree representing the offending operand. */ | |
2420 | ||
2421 | static tree | |
2422 | inline_forbidden_p_op (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED, | |
2423 | void *fnp ATTRIBUTE_UNUSED) | |
2424 | { | |
2425 | tree node = *nodep; | |
2426 | tree t; | |
2427 | ||
2428 | if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE) | |
2429 | { | |
2430 | /* We cannot inline a function of the form | |
2431 | ||
2432 | void F (int i) { struct S { int ar[i]; } s; } | |
2433 | ||
2434 | Attempting to do so produces a catch-22. | |
2435 | If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/ | |
2436 | UNION_TYPE nodes, then it goes into infinite recursion on a | |
2437 | structure containing a pointer to its own type. If it doesn't, | |
2438 | then the type node for S doesn't get adjusted properly when | |
2439 | F is inlined. | |
2440 | ||
2441 | ??? This is likely no longer true, but it's too late in the 4.0 | |
2442 | cycle to try to find out. This should be checked for 4.1. */ | |
2443 | for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t)) | |
2444 | if (variably_modified_type_p (TREE_TYPE (t), NULL)) | |
2445 | { | |
2446 | inline_forbidden_reason | |
2447 | = G_("function %q+F can never be inlined " | |
2448 | "because it uses variable sized variables"); | |
2449 | return node; | |
2450 | } | |
2451 | } | |
2452 | ||
2453 | return NULL_TREE; | |
4838c5ee AO |
2454 | } |
2455 | ||
726a989a RB |
2456 | |
2457 | /* A callback for walk_gimple_seq to handle statements. Returns | |
2458 | non-NULL iff a function cannot be inlined.  Also sets the reason | |
2459 | why. */ | |
c986baf6 | 2460 | |
c986baf6 | 2461 | static tree |
726a989a RB |
2462 | inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p, |
2463 | struct walk_stmt_info *wip) | |
c986baf6 | 2464 | { |
726a989a | 2465 | tree fn = (tree) wip->info; |
f08545a8 | 2466 | tree t; |
726a989a | 2467 | gimple stmt = gsi_stmt (*gsi); |
c986baf6 | 2468 | |
726a989a | 2469 | switch (gimple_code (stmt)) |
f08545a8 | 2470 | { |
726a989a | 2471 | case GIMPLE_CALL: |
3197c4fd AS |
2472 | /* Refuse to inline an alloca call unless the user explicitly forced it, | |
2473 | as this may change the program's memory overhead drastically when the | |
2474 | function using alloca is called in a loop.  In the GCC present in | |
2475 | SPEC2000, inlining into schedule_block caused it to require 2GB of | |
2476 | RAM instead of 256MB. */ | |
726a989a | 2477 | if (gimple_alloca_call_p (stmt) |
f08545a8 JH |
2478 | && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))) |
2479 | { | |
ddd2d57e | 2480 | inline_forbidden_reason |
dee15844 | 2481 | = G_("function %q+F can never be inlined because it uses " |
ddd2d57e | 2482 | "alloca (override using the always_inline attribute)"); |
726a989a RB |
2483 | *handled_ops_p = true; |
2484 | return fn; | |
f08545a8 | 2485 | } |
726a989a RB |
2486 | |
2487 | t = gimple_call_fndecl (stmt); | |
2488 | if (t == NULL_TREE) | |
f08545a8 | 2489 | break; |
84f5e1b1 | 2490 | |
f08545a8 JH |
2491 | /* We cannot inline functions that call setjmp. */ |
2492 | if (setjmp_call_p (t)) | |
2493 | { | |
ddd2d57e | 2494 | inline_forbidden_reason |
dee15844 | 2495 | = G_("function %q+F can never be inlined because it uses setjmp"); |
726a989a RB |
2496 | *handled_ops_p = true; |
2497 | return t; | |
f08545a8 JH |
2498 | } |
2499 | ||
6de9cd9a | 2500 | if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL) |
3197c4fd | 2501 | switch (DECL_FUNCTION_CODE (t)) |
f08545a8 | 2502 | { |
3197c4fd AS |
2503 | /* We cannot inline functions that take a variable number of |
2504 | arguments. */ | |
2505 | case BUILT_IN_VA_START: | |
3197c4fd AS |
2506 | case BUILT_IN_NEXT_ARG: |
2507 | case BUILT_IN_VA_END: | |
6de9cd9a | 2508 | inline_forbidden_reason |
dee15844 | 2509 | = G_("function %q+F can never be inlined because it " |
6de9cd9a | 2510 | "uses variable argument lists"); |
726a989a RB |
2511 | *handled_ops_p = true; |
2512 | return t; | |
6de9cd9a | 2513 | |
3197c4fd | 2514 | case BUILT_IN_LONGJMP: |
6de9cd9a DN |
2515 | /* We can't inline functions that call __builtin_longjmp at |
2516 | all. The non-local goto machinery really requires the | |
2517 | destination be in a different function. If we allow the | |
2518 | function calling __builtin_longjmp to be inlined into the | |
2519 | function calling __builtin_setjmp, Things will Go Awry. */ | |
2520 | inline_forbidden_reason | |
dee15844 | 2521 | = G_("function %q+F can never be inlined because " |
6de9cd9a | 2522 | "it uses setjmp-longjmp exception handling"); |
726a989a RB |
2523 | *handled_ops_p = true; |
2524 | return t; | |
6de9cd9a DN |
2525 | |
2526 | case BUILT_IN_NONLOCAL_GOTO: | |
2527 | /* Similarly. */ | |
2528 | inline_forbidden_reason | |
dee15844 | 2529 | = G_("function %q+F can never be inlined because " |
6de9cd9a | 2530 | "it uses non-local goto"); |
726a989a RB |
2531 | *handled_ops_p = true; |
2532 | return t; | |
f08545a8 | 2533 | |
4b284111 JJ |
2534 | case BUILT_IN_RETURN: |
2535 | case BUILT_IN_APPLY_ARGS: | |
2536 | /* If a __builtin_apply_args caller would be inlined, | |
2537 | it would be saving arguments of the function it has | |
2538 | been inlined into. Similarly __builtin_return would | |
2539 | return from the function the inline has been inlined into. */ | |
2540 | inline_forbidden_reason | |
dee15844 | 2541 | = G_("function %q+F can never be inlined because " |
4b284111 | 2542 | "it uses __builtin_return or __builtin_apply_args"); |
726a989a RB |
2543 | *handled_ops_p = true; |
2544 | return t; | |
4b284111 | 2545 | |
3197c4fd AS |
2546 | default: |
2547 | break; | |
2548 | } | |
f08545a8 JH |
2549 | break; |
2550 | ||
726a989a RB |
2551 | case GIMPLE_GOTO: |
2552 | t = gimple_goto_dest (stmt); | |
f08545a8 JH |
2553 | |
2554 | /* We will not inline a function which uses computed goto. The | |
2555 | addresses of its local labels, which may be tucked into | |
2556 | global storage, are of course not constant across | |
2557 | instantiations, which causes unexpected behavior. */ | |
2558 | if (TREE_CODE (t) != LABEL_DECL) | |
2559 | { | |
ddd2d57e | 2560 | inline_forbidden_reason |
dee15844 | 2561 | = G_("function %q+F can never be inlined " |
ddd2d57e | 2562 | "because it contains a computed goto"); |
726a989a RB |
2563 | *handled_ops_p = true; |
2564 | return t; | |
f08545a8 | 2565 | } |
6de9cd9a | 2566 | break; |
f08545a8 | 2567 | |
726a989a RB |
2568 | case GIMPLE_LABEL: |
2569 | t = gimple_label_label (stmt); | |
6de9cd9a | 2570 | if (DECL_NONLOCAL (t)) |
f08545a8 | 2571 | { |
6de9cd9a DN |
2572 | /* We cannot inline a function that receives a non-local goto |
2573 | because we cannot remap the destination label used in the | |
2574 | function that is performing the non-local goto. */ | |
ddd2d57e | 2575 | inline_forbidden_reason |
dee15844 | 2576 | = G_("function %q+F can never be inlined " |
6de9cd9a | 2577 | "because it receives a non-local goto"); |
726a989a RB |
2578 | *handled_ops_p = true; |
2579 | return t; | |
f08545a8 | 2580 | } |
f08545a8 JH |
2581 | break; |
2582 | ||
f08545a8 JH |
2583 | default: |
2584 | break; | |
2585 | } | |
2586 | ||
726a989a | 2587 | *handled_ops_p = false; |
f08545a8 | 2588 | return NULL_TREE; |
84f5e1b1 RH |
2589 | } |
2590 | ||
726a989a | 2591 | |
2092ee7d JJ |
2592 | static tree |
2593 | inline_forbidden_p_2 (tree *nodep, int *walk_subtrees, | |
2594 | void *fnp) | |
2595 | { | |
2596 | tree node = *nodep; | |
2597 | tree fn = (tree) fnp; | |
2598 | ||
2599 | if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn) | |
2600 | { | |
2601 | inline_forbidden_reason | |
2602 | = G_("function %q+F can never be inlined " | |
2603 | "because it saves address of local label in a static variable"); | |
2604 | return node; | |
2605 | } | |
2606 | ||
2607 | if (TYPE_P (node)) | |
2608 | *walk_subtrees = 0; | |
2609 | ||
2610 | return NULL_TREE; | |
2611 | } | |
2612 | ||
726a989a RB |
2613 | /* Return true if FNDECL is a function that cannot be inlined into |
2614 | another one. */ | |
2615 | ||
2616 | static bool | |
f08545a8 | 2617 | inline_forbidden_p (tree fndecl) |
84f5e1b1 | 2618 | { |
070588f0 | 2619 | location_t saved_loc = input_location; |
2092ee7d JJ |
2620 | struct function *fun = DECL_STRUCT_FUNCTION (fndecl); |
2621 | tree step; | |
726a989a RB |
2622 | struct walk_stmt_info wi; |
2623 | struct pointer_set_t *visited_nodes; | |
2624 | basic_block bb; | |
2625 | bool forbidden_p = false; | |
2626 | ||
2627 | visited_nodes = pointer_set_create (); | |
2628 | memset (&wi, 0, sizeof (wi)); | |
2629 | wi.info = (void *) fndecl; | |
2630 | wi.pset = visited_nodes; | |
e21aff8a | 2631 | |
2092ee7d | 2632 | FOR_EACH_BB_FN (bb, fun) |
726a989a RB |
2633 | { |
2634 | gimple ret; | |
2635 | gimple_seq seq = bb_seq (bb); | |
2636 | ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, | |
2637 | inline_forbidden_p_op, &wi); | |
2638 | forbidden_p = (ret != NULL); | |
2639 | if (forbidden_p) | |
2640 | goto egress; | |
2641 | } | |
ed397c43 | 2642 | |
cb91fab0 | 2643 | for (step = fun->local_decls; step; step = TREE_CHAIN (step)) |
2092ee7d JJ |
2644 | { |
2645 | tree decl = TREE_VALUE (step); | |
2646 | if (TREE_CODE (decl) == VAR_DECL | |
2647 | && TREE_STATIC (decl) | |
2648 | && !DECL_EXTERNAL (decl) | |
2649 | && DECL_INITIAL (decl)) | |
726a989a RB |
2650 | { |
2651 | tree ret; | |
2652 | ret = walk_tree_without_duplicates (&DECL_INITIAL (decl), | |
2653 | inline_forbidden_p_2, fndecl); | |
2654 | forbidden_p = (ret != NULL); | |
2655 | if (forbidden_p) | |
2656 | goto egress; | |
2657 | } | |
2092ee7d JJ |
2658 | } |
2659 | ||
e21aff8a | 2660 | egress: |
726a989a | 2661 | pointer_set_destroy (visited_nodes); |
070588f0 | 2662 | input_location = saved_loc; |
726a989a | 2663 | return forbidden_p; |
84f5e1b1 RH |
2664 | } |
2665 | ||
b3c3af2f SB |
2666 | /* Returns nonzero if FN is a function that does not have any |
2667 | fundamental inline blocking properties. */ | |
d4e4baa9 | 2668 | |
b3c3af2f SB |
2669 | static bool |
2670 | inlinable_function_p (tree fn) | |
d4e4baa9 | 2671 | { |
b3c3af2f | 2672 | bool inlinable = true; |
18177c7e RG |
2673 | bool do_warning; |
2674 | tree always_inline; | |
d4e4baa9 AO |
2675 | |
2676 | /* If we've already decided this function shouldn't be inlined, | |
2677 | there's no need to check again. */ | |
2678 | if (DECL_UNINLINABLE (fn)) | |
b3c3af2f | 2679 | return false; |
d4e4baa9 | 2680 | |
18177c7e RG |
2681 | /* We only warn for functions declared `inline' by the user. */ |
2682 | do_warning = (warn_inline | |
18177c7e | 2683 | && DECL_DECLARED_INLINE_P (fn) |
0494626a | 2684 | && !DECL_NO_INLINE_WARNING_P (fn) |
18177c7e RG |
2685 | && !DECL_IN_SYSTEM_HEADER (fn)); |
2686 | ||
2687 | always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)); | |
2688 | ||
e90acd93 | 2689 | if (flag_no_inline |
18177c7e RG |
2690 | && always_inline == NULL) |
2691 | { | |
2692 | if (do_warning) | |
2693 | warning (OPT_Winline, "function %q+F can never be inlined because it " | |
2694 | "is suppressed using -fno-inline", fn); | |
2695 | inlinable = false; | |
2696 | } | |
2697 | ||
2698 | /* Don't auto-inline anything that might not be bound within | |
2699 | this unit of translation. */ | |
2700 | else if (!DECL_DECLARED_INLINE_P (fn) | |
2701 | && DECL_REPLACEABLE_P (fn)) | |
2702 | inlinable = false; | |
2703 | ||
2704 | else if (!function_attribute_inlinable_p (fn)) | |
2705 | { | |
2706 | if (do_warning) | |
2707 | warning (OPT_Winline, "function %q+F can never be inlined because it " | |
2708 | "uses attributes conflicting with inlining", fn); | |
2709 | inlinable = false; | |
2710 | } | |
46c5ad27 | 2711 | |
f08545a8 | 2712 | else if (inline_forbidden_p (fn)) |
b3c3af2f SB |
2713 | { |
2714 | /* See if we should warn about uninlinable functions. Previously, | |
2715 | some of these warnings would be issued while trying to expand | |
2716 | the function inline, but that would cause multiple warnings | |
2717 | about functions that would, for example, call alloca. But since |
2718 | this is a property of the function, just one warning is enough. |
2719 | As a bonus we can now give more details about the reason why a | |
18177c7e RG |
2720 | function is not inlinable. */ |
2721 | if (always_inline) | |
dee15844 | 2722 | sorry (inline_forbidden_reason, fn); |
2d327012 | 2723 | else if (do_warning) |
d2fcbf6f | 2724 | warning (OPT_Winline, inline_forbidden_reason, fn); |
b3c3af2f SB |
2725 | |
2726 | inlinable = false; | |
2727 | } | |
d4e4baa9 AO |
2728 | |
2729 | /* Squirrel away the result so that we don't have to check again. */ | |
b3c3af2f | 2730 | DECL_UNINLINABLE (fn) = !inlinable; |
d4e4baa9 | 2731 | |
b3c3af2f SB |
2732 | return inlinable; |
2733 | } | |
2734 | ||
e5c4f28a RG |
2735 | /* Estimate the cost of a memory move. Use the machine-dependent |
2736 | word size and take a possible memcpy call into account. */ |
2737 | ||
2738 | int | |
2739 | estimate_move_cost (tree type) | |
2740 | { | |
2741 | HOST_WIDE_INT size; | |
2742 | ||
2743 | size = int_size_in_bytes (type); | |
2744 | ||
e04ad03d | 2745 | if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size)) |
e5c4f28a RG |
2746 | /* Cost of a memcpy call, 3 arguments and the call. */ |
2747 | return 4; | |
2748 | else | |
2749 | return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES); | |
2750 | } | |
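/* A worked example with illustrative values (MOVE_MAX_PIECES and
   MOVE_RATIO are target-dependent macros): assuming MOVE_MAX_PIECES == 8
   and MOVE_RATIO == 4, a 16-byte structure costs (16 + 8 - 1) / 8 == 2
   word moves, whereas a 40-byte structure exceeds 8 * 4 == 32 bytes and
   is therefore costed as a memcpy call, i.e. 4.  */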
2751 | ||
726a989a | 2752 | /* Return the cost of operation CODE, according to WEIGHTS. */ |
7f9bc51b | 2753 | |
726a989a RB |
2754 | static int |
2755 | estimate_operator_cost (enum tree_code code, eni_weights *weights) | |
6de9cd9a | 2756 | { |
726a989a | 2757 | switch (code) |
6de9cd9a | 2758 | { |
726a989a RB |
2759 | /* These are "free" conversions, or their presumed cost |
2760 | is folded into other operations. */ | |
61fcaeec | 2761 | case RANGE_EXPR: |
1a87cf0c | 2762 | CASE_CONVERT: |
726a989a RB |
2763 | case COMPLEX_EXPR: |
2764 | case PAREN_EXPR: | |
726a989a | 2765 | return 0; |
6de9cd9a | 2766 | |
e5c4f28a RG |
2767 | /* Assign cost of 1 to usual operations. |
2768 | ??? We may consider mapping RTL costs to this. */ | |
6de9cd9a | 2769 | case COND_EXPR: |
4151978d | 2770 | case VEC_COND_EXPR: |
6de9cd9a DN |
2771 | |
2772 | case PLUS_EXPR: | |
5be014d5 | 2773 | case POINTER_PLUS_EXPR: |
6de9cd9a DN |
2774 | case MINUS_EXPR: |
2775 | case MULT_EXPR: | |
2776 | ||
325217ed | 2777 | case FIXED_CONVERT_EXPR: |
6de9cd9a | 2778 | case FIX_TRUNC_EXPR: |
6de9cd9a DN |
2779 | |
2780 | case NEGATE_EXPR: | |
2781 | case FLOAT_EXPR: | |
2782 | case MIN_EXPR: | |
2783 | case MAX_EXPR: | |
2784 | case ABS_EXPR: | |
2785 | ||
2786 | case LSHIFT_EXPR: | |
2787 | case RSHIFT_EXPR: | |
2788 | case LROTATE_EXPR: | |
2789 | case RROTATE_EXPR: | |
a6b46ba2 DN |
2790 | case VEC_LSHIFT_EXPR: |
2791 | case VEC_RSHIFT_EXPR: | |
6de9cd9a DN |
2792 | |
2793 | case BIT_IOR_EXPR: | |
2794 | case BIT_XOR_EXPR: | |
2795 | case BIT_AND_EXPR: | |
2796 | case BIT_NOT_EXPR: | |
2797 | ||
2798 | case TRUTH_ANDIF_EXPR: | |
2799 | case TRUTH_ORIF_EXPR: | |
2800 | case TRUTH_AND_EXPR: | |
2801 | case TRUTH_OR_EXPR: | |
2802 | case TRUTH_XOR_EXPR: | |
2803 | case TRUTH_NOT_EXPR: | |
2804 | ||
2805 | case LT_EXPR: | |
2806 | case LE_EXPR: | |
2807 | case GT_EXPR: | |
2808 | case GE_EXPR: | |
2809 | case EQ_EXPR: | |
2810 | case NE_EXPR: | |
2811 | case ORDERED_EXPR: | |
2812 | case UNORDERED_EXPR: | |
2813 | ||
2814 | case UNLT_EXPR: | |
2815 | case UNLE_EXPR: | |
2816 | case UNGT_EXPR: | |
2817 | case UNGE_EXPR: | |
2818 | case UNEQ_EXPR: | |
d1a7edaf | 2819 | case LTGT_EXPR: |
6de9cd9a | 2820 | |
6de9cd9a DN |
2821 | case CONJ_EXPR: |
2822 | ||
2823 | case PREDECREMENT_EXPR: | |
2824 | case PREINCREMENT_EXPR: | |
2825 | case POSTDECREMENT_EXPR: | |
2826 | case POSTINCREMENT_EXPR: | |
2827 | ||
16630a2c DN |
2828 | case REALIGN_LOAD_EXPR: |
2829 | ||
61d3cdbb DN |
2830 | case REDUC_MAX_EXPR: |
2831 | case REDUC_MIN_EXPR: | |
2832 | case REDUC_PLUS_EXPR: | |
20f06221 | 2833 | case WIDEN_SUM_EXPR: |
726a989a RB |
2834 | case WIDEN_MULT_EXPR: |
2835 | case DOT_PROD_EXPR: | |
2836 | ||
89d67cca DN |
2837 | case VEC_WIDEN_MULT_HI_EXPR: |
2838 | case VEC_WIDEN_MULT_LO_EXPR: | |
2839 | case VEC_UNPACK_HI_EXPR: | |
2840 | case VEC_UNPACK_LO_EXPR: | |
d9987fb4 UB |
2841 | case VEC_UNPACK_FLOAT_HI_EXPR: |
2842 | case VEC_UNPACK_FLOAT_LO_EXPR: | |
8115817b | 2843 | case VEC_PACK_TRUNC_EXPR: |
89d67cca | 2844 | case VEC_PACK_SAT_EXPR: |
d9987fb4 | 2845 | case VEC_PACK_FIX_TRUNC_EXPR: |
98b44b0e IR |
2846 | case VEC_EXTRACT_EVEN_EXPR: |
2847 | case VEC_EXTRACT_ODD_EXPR: | |
2848 | case VEC_INTERLEAVE_HIGH_EXPR: | |
2849 | case VEC_INTERLEAVE_LOW_EXPR: | |
2850 | ||
726a989a | 2851 | return 1; |
6de9cd9a | 2852 | |
1ea7e6ad | 2853 | /* A few special cases of expensive operations. This is useful |
6de9cd9a DN |
2854 | for avoiding inlining of functions that have too many of these. */ |
2855 | case TRUNC_DIV_EXPR: | |
2856 | case CEIL_DIV_EXPR: | |
2857 | case FLOOR_DIV_EXPR: | |
2858 | case ROUND_DIV_EXPR: | |
2859 | case EXACT_DIV_EXPR: | |
2860 | case TRUNC_MOD_EXPR: | |
2861 | case CEIL_MOD_EXPR: | |
2862 | case FLOOR_MOD_EXPR: | |
2863 | case ROUND_MOD_EXPR: | |
2864 | case RDIV_EXPR: | |
726a989a RB |
2865 | return weights->div_mod_cost; |
2866 | ||
2867 | default: | |
2868 | /* We expect a copy assignment with no operator. */ | |
2869 | gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS); | |
2870 | return 0; | |
2871 | } | |
2872 | } | |
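/* For example, the RHS code of "a = b / c" (TRUNC_DIV_EXPR) is costed
   at weights->div_mod_cost, "a = b + c" (PLUS_EXPR) costs 1, and a
   plain copy "a = b" falls into the default case and costs 0.  */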
2873 | ||
2874 | ||
2875 | /* Estimate number of instructions that will be created by expanding | |
2876 | the statements in the statement sequence STMTS. | |
2877 | WEIGHTS contains weights attributed to various constructs. */ | |
2878 | ||
2879 | static int |
2880 | estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights) |
2881 | { | |
2882 | int cost; | |
2883 | gimple_stmt_iterator gsi; | |
2884 | ||
2885 | cost = 0; | |
2886 | for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi)) | |
2887 | cost += estimate_num_insns (gsi_stmt (gsi), weights); | |
2888 | ||
2889 | return cost; | |
2890 | } | |
2891 | ||
2892 | ||
2893 | /* Estimate number of instructions that will be created by expanding STMT. | |
2894 | WEIGHTS contains weights attributed to various constructs. */ | |
2895 | ||
2896 | int | |
2897 | estimate_num_insns (gimple stmt, eni_weights *weights) | |
2898 | { | |
2899 | unsigned cost, i; | |
2900 | enum gimple_code code = gimple_code (stmt); | |
2901 | tree lhs; | |
2902 | ||
2903 | switch (code) | |
2904 | { | |
2905 | case GIMPLE_ASSIGN: | |
2906 | /* Try to estimate the cost of assignments. We have two cases to |
2907 | deal with: | |
2908 | 1) Simple assignments to registers; | |
2909 | 2) Stores to things that must live in memory. This includes | |
2910 | "normal" stores to scalars, but also assignments of large | |
2911 | structures, or constructors of big arrays; | |
2912 | ||
2913 | Let us look at these two cases, assuming we have "a = b + C": |
2914 | <GIMPLE_ASSIGN <var_decl "a"> | |
2915 | <plus_expr <var_decl "b"> <constant C>> | |
2916 | If "a" is a GIMPLE register, the assignment to it is free on almost | |
2917 | any target, because "a" usually ends up in a real register. Hence | |
2918 | the only cost of this expression comes from the PLUS_EXPR, and we | |
2919 | can ignore the GIMPLE_ASSIGN. | |
2920 | If "a" is not a GIMPLE register, the assignment to "a" will most | |
2921 | likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost | |
2922 | of moving something into "a", which we compute using the function | |
2923 | estimate_move_cost. */ | |
2924 | lhs = gimple_assign_lhs (stmt); | |
2925 | if (is_gimple_reg (lhs)) | |
2926 | cost = 0; | |
2927 | else | |
2928 | cost = estimate_move_cost (TREE_TYPE (lhs)); | |
2929 | ||
2930 | cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights); | |
2931 | break; | |
2932 | ||
2933 | case GIMPLE_COND: | |
2934 | cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights); | |
2935 | break; | |
2936 | ||
2937 | case GIMPLE_SWITCH: | |
2938 | /* Take into account the cost of the switch + guess 2 conditional jumps for |
2939 | each case label. |
2940 | ||
2941 | TODO: once the switch expansion logic is sufficiently separated, we can |
2942 | do a better job of estimating the cost of the switch. */ |
2943 | cost = gimple_switch_num_labels (stmt) * 2; | |
6de9cd9a | 2944 | break; |
726a989a RB |
2945 | |
2946 | case GIMPLE_CALL: | |
6de9cd9a | 2947 | { |
726a989a RB |
2948 | tree decl = gimple_call_fndecl (stmt); |
2949 | tree addr = gimple_call_fn (stmt); | |
8723e2fe JH |
2950 | tree funtype = TREE_TYPE (addr); |
2951 | ||
726a989a RB |
2952 | if (POINTER_TYPE_P (funtype)) |
2953 | funtype = TREE_TYPE (funtype); | |
6de9cd9a | 2954 | |
625a2efb | 2955 | if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD) |
726a989a | 2956 | cost = weights->target_builtin_call_cost; |
625a2efb | 2957 | else |
726a989a | 2958 | cost = weights->call_cost; |
625a2efb | 2959 | |
8c96cd51 | 2960 | if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL) |
6de9cd9a DN |
2961 | switch (DECL_FUNCTION_CODE (decl)) |
2962 | { | |
2963 | case BUILT_IN_CONSTANT_P: | |
726a989a | 2964 | return 0; |
6de9cd9a | 2965 | case BUILT_IN_EXPECT: |
726a989a RB |
2966 | cost = 0; |
2967 | break; | |
2968 | ||
7f9bc51b ZD |
2969 | /* Prefetch instruction is not expensive. */ |
2970 | case BUILT_IN_PREFETCH: | |
726a989a | 2971 | cost = weights->target_builtin_call_cost; |
7f9bc51b | 2972 | break; |
726a989a | 2973 | |
6de9cd9a DN |
2974 | default: |
2975 | break; | |
2976 | } | |
e5c4f28a | 2977 | |
8723e2fe JH |
2978 | if (decl) |
2979 | funtype = TREE_TYPE (decl); | |
2980 | ||
726a989a RB |
2981 | /* Our cost must be kept in sync with |
2982 | cgraph_estimate_size_after_inlining, which uses the function |
2983 | declaration to figure out the arguments. */ |
8723e2fe JH |
2984 | if (decl && DECL_ARGUMENTS (decl)) |
2985 | { | |
2986 | tree arg; | |
2987 | for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg)) | |
726a989a | 2988 | cost += estimate_move_cost (TREE_TYPE (arg)); |
8723e2fe JH |
2989 | } |
2990 | else if (funtype && prototype_p (funtype)) | |
2991 | { | |
2992 | tree t; | |
2993 | for (t = TYPE_ARG_TYPES (funtype); t; t = TREE_CHAIN (t)) | |
726a989a | 2994 | cost += estimate_move_cost (TREE_VALUE (t)); |
8723e2fe JH |
2995 | } |
2996 | else | |
c7f599d0 | 2997 | { |
726a989a RB |
2998 | for (i = 0; i < gimple_call_num_args (stmt); i++) |
2999 | { | |
3000 | tree arg = gimple_call_arg (stmt, i); | |
3001 | cost += estimate_move_cost (TREE_TYPE (arg)); | |
3002 | } | |
c7f599d0 | 3003 | } |
e5c4f28a | 3004 | |
6de9cd9a DN |
3005 | break; |
3006 | } | |
88f4034b | 3007 | |
726a989a RB |
3008 | case GIMPLE_GOTO: |
3009 | case GIMPLE_LABEL: | |
3010 | case GIMPLE_NOP: | |
3011 | case GIMPLE_PHI: | |
3012 | case GIMPLE_RETURN: | |
3013 | case GIMPLE_CHANGE_DYNAMIC_TYPE: | |
3014 | case GIMPLE_PREDICT: | |
3015 | return 0; | |
3016 | ||
3017 | case GIMPLE_ASM: | |
3018 | case GIMPLE_RESX: | |
3019 | return 1; | |
3020 | ||
3021 | case GIMPLE_BIND: | |
3022 | return estimate_num_insns_seq (gimple_bind_body (stmt), weights); | |
3023 | ||
3024 | case GIMPLE_EH_FILTER: | |
3025 | return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights); | |
3026 | ||
3027 | case GIMPLE_CATCH: | |
3028 | return estimate_num_insns_seq (gimple_catch_handler (stmt), weights); | |
3029 | ||
3030 | case GIMPLE_TRY: | |
3031 | return (estimate_num_insns_seq (gimple_try_eval (stmt), weights) | |
3032 | + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights)); | |
3033 | ||
3034 | /* OpenMP directives are generally very expensive. */ | |
3035 | ||
3036 | case GIMPLE_OMP_RETURN: | |
3037 | case GIMPLE_OMP_SECTIONS_SWITCH: | |
3038 | case GIMPLE_OMP_ATOMIC_STORE: | |
3039 | case GIMPLE_OMP_CONTINUE: | |
3040 | /* ...except these, which are cheap. */ | |
3041 | return 0; | |
3042 | ||
3043 | case GIMPLE_OMP_ATOMIC_LOAD: | |
3044 | return weights->omp_cost; | |
3045 | ||
3046 | case GIMPLE_OMP_FOR: | |
3047 | return (weights->omp_cost | |
3048 | + estimate_num_insns_seq (gimple_omp_body (stmt), weights) | |
3049 | + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights)); | |
3050 | ||
3051 | case GIMPLE_OMP_PARALLEL: | |
3052 | case GIMPLE_OMP_TASK: | |
3053 | case GIMPLE_OMP_CRITICAL: | |
3054 | case GIMPLE_OMP_MASTER: | |
3055 | case GIMPLE_OMP_ORDERED: | |
3056 | case GIMPLE_OMP_SECTION: | |
3057 | case GIMPLE_OMP_SECTIONS: | |
3058 | case GIMPLE_OMP_SINGLE: | |
3059 | return (weights->omp_cost | |
3060 | + estimate_num_insns_seq (gimple_omp_body (stmt), weights)); | |
88f4034b | 3061 | |
6de9cd9a | 3062 | default: |
1e128c5f | 3063 | gcc_unreachable (); |
6de9cd9a | 3064 | } |
726a989a RB |
3065 | |
3066 | return cost; | |
6de9cd9a DN |
3067 | } |
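/* For example, under eni_size_weights a call "foo (a, b)" with two int
   arguments is costed as 1 (call_cost) + 1 + 1 (one word move per
   argument on a typical target) == 3 instructions.  */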
3068 | ||
726a989a RB |
3069 | /* Estimate number of instructions that will be created by expanding |
3070 | function FNDECL. WEIGHTS contains weights attributed to various | |
3071 | constructs. */ | |
aa4a53af | 3072 | |
6de9cd9a | 3073 | int |
726a989a | 3074 | estimate_num_insns_fn (tree fndecl, eni_weights *weights) |
6de9cd9a | 3075 | { |
726a989a RB |
3076 | struct function *my_function = DECL_STRUCT_FUNCTION (fndecl); |
3077 | gimple_stmt_iterator bsi; | |
e21aff8a | 3078 | basic_block bb; |
726a989a | 3079 | int n = 0; |
e21aff8a | 3080 | |
726a989a RB |
3081 | gcc_assert (my_function && my_function->cfg); |
3082 | FOR_EACH_BB_FN (bb, my_function) | |
e21aff8a | 3083 | { |
726a989a RB |
3084 | for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
3085 | n += estimate_num_insns (gsi_stmt (bsi), weights); | |
e21aff8a | 3086 | } |
e21aff8a | 3087 | |
726a989a | 3088 | return n; |
7f9bc51b ZD |
3089 | } |
3090 | ||
726a989a | 3091 | |
7f9bc51b ZD |
3092 | /* Initializes weights used by estimate_num_insns. */ |
3093 | ||
3094 | void | |
3095 | init_inline_once (void) | |
3096 | { | |
3097 | eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST); | |
625a2efb | 3098 | eni_inlining_weights.target_builtin_call_cost = 1; |
7f9bc51b | 3099 | eni_inlining_weights.div_mod_cost = 10; |
7f9bc51b ZD |
3100 | eni_inlining_weights.omp_cost = 40; |
3101 | ||
3102 | eni_size_weights.call_cost = 1; | |
625a2efb | 3103 | eni_size_weights.target_builtin_call_cost = 1; |
7f9bc51b | 3104 | eni_size_weights.div_mod_cost = 1; |
7f9bc51b ZD |
3105 | eni_size_weights.omp_cost = 40; |
3106 | ||
3107 | /* Estimating the time for a call is difficult, since we have no idea what the |
3108 | called function does. In the current uses of eni_time_weights, | |
3109 | underestimating the cost does less harm than overestimating it, so | |
ea2c620c | 3110 | we choose a rather small value here. */ |
7f9bc51b | 3111 | eni_time_weights.call_cost = 10; |
625a2efb | 3112 | eni_time_weights.target_builtin_call_cost = 10; |
7f9bc51b | 3113 | eni_time_weights.div_mod_cost = 10; |
7f9bc51b | 3114 | eni_time_weights.omp_cost = 40; |
6de9cd9a DN |
3115 | } |
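/* Summary of the defaults set above (call_cost for the inlining
   weights comes from the --param inline-call-cost value):

                           call   target builtin   div/mod   omp
     eni_inlining_weights  param        1             10      40
     eni_size_weights        1          1              1      40
     eni_time_weights       10         10             10      40  */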
3116 | ||
726a989a RB |
3117 | /* Estimate the number of instructions in a gimple_seq. */ |
3118 | ||
3119 | int | |
3120 | count_insns_seq (gimple_seq seq, eni_weights *weights) | |
3121 | { | |
3122 | gimple_stmt_iterator gsi; | |
3123 | int n = 0; | |
3124 | for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3125 | n += estimate_num_insns (gsi_stmt (gsi), weights); | |
3126 | ||
3127 | return n; | |
3128 | } | |
3129 | ||
3130 | ||
e21aff8a | 3131 | /* Install the lexical block NEW_BLOCK underneath CURRENT_BLOCK. */ |
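/* That is, NEW_BLOCK becomes the first subblock:

     CURRENT_BLOCK                 CURRENT_BLOCK
       sub1, sub2, ...     -->       NEW_BLOCK, sub1, sub2, ...  */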
726a989a | 3132 | |
e21aff8a | 3133 | static void |
4a283090 | 3134 | prepend_lexical_block (tree current_block, tree new_block) |
e21aff8a | 3135 | { |
4a283090 JH |
3136 | BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block); |
3137 | BLOCK_SUBBLOCKS (current_block) = new_block; | |
e21aff8a | 3138 | BLOCK_SUPERCONTEXT (new_block) = current_block; |
e21aff8a SB |
3139 | } |
3140 | ||
3e293154 MJ |
3141 | /* Fetch callee declaration from the call graph edge going from NODE and |
3142 | associated with the call statement STMT. Return NULL_TREE if not found. */ |
3143 | static tree | |
726a989a | 3144 | get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt) |
3e293154 MJ |
3145 | { |
3146 | struct cgraph_edge *cs; | |
3147 | ||
3148 | cs = cgraph_edge (node, stmt); | |
3149 | if (cs) | |
3150 | return cs->callee->decl; | |
3151 | ||
3152 | return NULL_TREE; | |
3153 | } | |
3154 | ||
726a989a | 3155 | /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */ |
d4e4baa9 | 3156 | |
e21aff8a | 3157 | static bool |
726a989a | 3158 | expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id) |
d4e4baa9 | 3159 | { |
1ea193c2 | 3160 | tree retvar, use_retvar; |
d436bff8 | 3161 | tree fn; |
6be42dd4 | 3162 | struct pointer_map_t *st; |
110cfe1c | 3163 | tree return_slot; |
7740f00d | 3164 | tree modify_dest; |
6de9cd9a | 3165 | location_t saved_location; |
e21aff8a | 3166 | struct cgraph_edge *cg_edge; |
61a05df1 | 3167 | cgraph_inline_failed_t reason; |
e21aff8a SB |
3168 | basic_block return_block; |
3169 | edge e; | |
726a989a | 3170 | gimple_stmt_iterator gsi, stmt_gsi; |
e21aff8a | 3171 | bool successfully_inlined = FALSE; |
4f6c2131 | 3172 | bool purge_dead_abnormal_edges; |
e21aff8a SB |
3173 | tree t_step; |
3174 | tree var; | |
d4e4baa9 | 3175 | |
6de9cd9a DN |
3176 | /* Set input_location here so we get the right instantiation context |
3177 | if we call instantiate_decl from inlinable_function_p. */ | |
3178 | saved_location = input_location; | |
726a989a RB |
3179 | if (gimple_has_location (stmt)) |
3180 | input_location = gimple_location (stmt); | |
6de9cd9a | 3181 | |
d4e4baa9 | 3182 | /* From here on, we're only interested in GIMPLE_CALLs. */ |
726a989a | 3183 | if (gimple_code (stmt) != GIMPLE_CALL) |
6de9cd9a | 3184 | goto egress; |
d4e4baa9 AO |
3185 | |
3186 | /* First, see if we can figure out what function is being called. | |
3187 | If we cannot, then there is no hope of inlining the function. */ | |
726a989a | 3188 | fn = gimple_call_fndecl (stmt); |
d4e4baa9 | 3189 | if (!fn) |
3e293154 MJ |
3190 | { |
3191 | fn = get_indirect_callee_fndecl (id->dst_node, stmt); | |
3192 | if (!fn) | |
3193 | goto egress; | |
3194 | } | |
d4e4baa9 | 3195 | |
b58b1157 | 3196 | /* Turn forward declarations into real ones. */ |
d4d1ebc1 | 3197 | fn = cgraph_node (fn)->decl; |
b58b1157 | 3198 | |
726a989a | 3199 | /* If FN is a declaration of a function in a nested scope that was |
a1a0fd4e AO |
3200 | globally declared inline, we don't set its DECL_INITIAL. |
3201 | However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the | |
3202 | C++ front-end uses it for cdtors to refer to their internal | |
3203 | declarations, that are not real functions. Fortunately those | |
3204 | don't have trees to be saved, so we can tell by checking their | |
726a989a RB |
3205 | gimple_body. */ |
3206 | if (!DECL_INITIAL (fn) | |
a1a0fd4e | 3207 | && DECL_ABSTRACT_ORIGIN (fn) |
39ecc018 | 3208 | && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn))) |
a1a0fd4e AO |
3209 | fn = DECL_ABSTRACT_ORIGIN (fn); |
3210 | ||
18c6ada9 JH |
3211 | /* Objective-C and Fortran still call tree_rest_of_compilation directly. |
3212 | Kill this check once this is fixed. */ | |
1b369fae | 3213 | if (!id->dst_node->analyzed) |
6de9cd9a | 3214 | goto egress; |
18c6ada9 | 3215 | |
1b369fae | 3216 | cg_edge = cgraph_edge (id->dst_node, stmt); |
18c6ada9 JH |
3217 | |
3218 | /* Constant propagation on argument done during previous inlining | |
3219 | may create new direct call. Produce an edge for it. */ | |
e21aff8a | 3220 | if (!cg_edge) |
18c6ada9 JH |
3221 | { |
3222 | struct cgraph_node *dest = cgraph_node (fn); | |
3223 | ||
6de9cd9a DN |
3224 | /* We have a missing edge in the call graph. This can happen in one case |
3225 | where previous inlining turned an indirect call into a direct call by |
3226 | constant-propagating arguments. In all other cases we hit a bug |
3227 | (incorrect node sharing is the most common reason for missing edges). */ |
7e8b322a | 3228 | gcc_assert (dest->needed); |
1b369fae | 3229 | cgraph_create_edge (id->dst_node, dest, stmt, |
45a80bb9 JH |
3230 | bb->count, CGRAPH_FREQ_BASE, |
3231 | bb->loop_depth)->inline_failed | |
61a05df1 | 3232 | = CIF_ORIGINALLY_INDIRECT_CALL; |
45a80bb9 JH |
3233 | if (dump_file) |
3234 | { | |
3235 | fprintf (dump_file, "Created new direct edge to %s", | |
3236 | cgraph_node_name (dest)); | |
3237 | } | |
6de9cd9a | 3238 | goto egress; |
18c6ada9 JH |
3239 | } |
3240 | ||
d4e4baa9 AO |
3241 | /* Don't try to inline functions that are not well-suited to |
3242 | inlining. */ | |
e21aff8a | 3243 | if (!cgraph_inline_p (cg_edge, &reason)) |
a833faa5 | 3244 | { |
3e293154 MJ |
3245 | /* If this call was originally indirect, we do not want to emit any |
3246 | inlining related warnings or sorry messages because there are no | |
3247 | guarantees regarding those. */ | |
3248 | if (cg_edge->indirect_call) | |
3249 | goto egress; | |
3250 | ||
7fac66d4 JH |
3251 | if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)) |
3252 | /* Avoid warnings during early inline pass. */ | |
7e8b322a | 3253 | && cgraph_global_info_ready) |
2d327012 | 3254 | { |
61a05df1 JH |
3255 | sorry ("inlining failed in call to %q+F: %s", fn, |
3256 | cgraph_inline_failed_string (reason)); | |
2d327012 JH |
3257 | sorry ("called from here"); |
3258 | } | |
3259 | else if (warn_inline && DECL_DECLARED_INLINE_P (fn) | |
3260 | && !DECL_IN_SYSTEM_HEADER (fn) | |
61a05df1 | 3261 | && reason != CIF_UNSPECIFIED |
d63db217 JH |
3262 | && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)) |
3263 | /* Avoid warnings during early inline pass. */ | |
7e8b322a | 3264 | && cgraph_global_info_ready) |
a833faa5 | 3265 | { |
dee15844 | 3266 | warning (OPT_Winline, "inlining failed in call to %q+F: %s", |
61a05df1 | 3267 | fn, cgraph_inline_failed_string (reason)); |
3176a0c2 | 3268 | warning (OPT_Winline, "called from here"); |
a833faa5 | 3269 | } |
6de9cd9a | 3270 | goto egress; |
a833faa5 | 3271 | } |
ea99e0be | 3272 | fn = cg_edge->callee->decl; |
d4e4baa9 | 3273 | |
18c6ada9 | 3274 | #ifdef ENABLE_CHECKING |
1b369fae | 3275 | if (cg_edge->callee->decl != id->dst_node->decl) |
e21aff8a | 3276 | verify_cgraph_node (cg_edge->callee); |
18c6ada9 JH |
3277 | #endif |
3278 | ||
e21aff8a | 3279 | /* We will be inlining this callee. */ |
e21aff8a SB |
3280 | id->eh_region = lookup_stmt_eh_region (stmt); |
3281 | ||
726a989a | 3282 | /* Split the block holding the GIMPLE_CALL. */ |
e21aff8a SB |
3283 | e = split_block (bb, stmt); |
3284 | bb = e->src; | |
3285 | return_block = e->dest; | |
3286 | remove_edge (e); | |
3287 | ||
4f6c2131 EB |
3288 | /* split_block splits after the statement; work around this by |
3289 | moving the call into the second block manually. Not pretty, | |
3290 | but seems easier than doing the CFG manipulation by hand | |
726a989a RB |
3291 | when the GIMPLE_CALL is the last statement of BB. */ |
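/* A sketch of the CFG surgery performed here (shapes are illustrative):

     before:       BB: [ ...; call; rest ] --> succs
     after split:  BB: [ ...; call ]     RETURN_BLOCK: [ rest ] --> succs
     after move:   BB: [ ... ]           RETURN_BLOCK: [ call; rest ] --> succs

   copy_body later places the inlined body between BB and RETURN_BLOCK,
   and the call statement itself is then replaced or removed.  */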
3292 | stmt_gsi = gsi_last_bb (bb); | |
3293 | gsi_remove (&stmt_gsi, false); | |
4f6c2131 | 3294 | |
726a989a | 3295 | /* If the GIMPLE_CALL was the last statement of BB, it may have |
4f6c2131 EB |
3296 | been the source of abnormal edges. In this case, schedule |
3297 | the removal of dead abnormal edges. */ | |
726a989a RB |
3298 | gsi = gsi_start_bb (return_block); |
3299 | if (gsi_end_p (gsi)) | |
e21aff8a | 3300 | { |
726a989a | 3301 | gsi_insert_after (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 | 3302 | purge_dead_abnormal_edges = true; |
e21aff8a | 3303 | } |
4f6c2131 EB |
3304 | else |
3305 | { | |
726a989a | 3306 | gsi_insert_before (&gsi, stmt, GSI_NEW_STMT); |
4f6c2131 EB |
3307 | purge_dead_abnormal_edges = false; |
3308 | } | |
3309 | ||
726a989a | 3310 | stmt_gsi = gsi_start_bb (return_block); |
742a37d5 | 3311 | |
d436bff8 AH |
3312 | /* Build a block containing code to initialize the arguments, the |
3313 | actual inline expansion of the body, and a label for the return | |
3314 | statements within the function to jump to. The type of the | |
3315 | statement expression is the return type of the function call. */ | |
e21aff8a SB |
3316 | id->block = make_node (BLOCK); |
3317 | BLOCK_ABSTRACT_ORIGIN (id->block) = fn; | |
3e2844cb | 3318 | BLOCK_SOURCE_LOCATION (id->block) = input_location; |
4a283090 | 3319 | prepend_lexical_block (gimple_block (stmt), id->block); |
e21aff8a | 3320 | |
d4e4baa9 AO |
3321 | /* Local declarations will be replaced by their equivalents in this |
3322 | map. */ | |
3323 | st = id->decl_map; | |
6be42dd4 | 3324 | id->decl_map = pointer_map_create (); |
d4e4baa9 | 3325 | |
e21aff8a | 3326 | /* Record the function we are about to inline. */ |
1b369fae RH |
3327 | id->src_fn = fn; |
3328 | id->src_node = cg_edge->callee; | |
110cfe1c | 3329 | id->src_cfun = DECL_STRUCT_FUNCTION (fn); |
726a989a | 3330 | id->gimple_call = stmt; |
1b369fae | 3331 | |
3c8da8a5 AO |
3332 | gcc_assert (!id->src_cfun->after_inlining); |
3333 | ||
045685a9 | 3334 | id->entry_bb = bb; |
7299cb99 JH |
3335 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn))) |
3336 | { | |
3337 | gimple_stmt_iterator si = gsi_last_bb (bb); | |
3338 | gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION, | |
3339 | NOT_TAKEN), | |
3340 | GSI_NEW_STMT); | |
3341 | } | |
726a989a | 3342 | initialize_inlined_parameters (id, stmt, fn, bb); |
d4e4baa9 | 3343 | |
ea99e0be | 3344 | if (DECL_INITIAL (fn)) |
4a283090 | 3345 | prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id)); |
acb8f212 | 3346 | |
d4e4baa9 AO |
3347 | /* Return statements in the function body will be replaced by jumps |
3348 | to the return block. */ |
1e128c5f GB |
3349 | gcc_assert (DECL_INITIAL (fn)); |
3350 | gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK); | |
23700f65 | 3351 | |
726a989a | 3352 | /* Find the LHS to which the result of this call is assigned. */ |
110cfe1c | 3353 | return_slot = NULL; |
726a989a | 3354 | if (gimple_call_lhs (stmt)) |
81bafd36 | 3355 | { |
726a989a | 3356 | modify_dest = gimple_call_lhs (stmt); |
81bafd36 ILT |
3357 | |
3358 | /* The function which we are inlining might not return a value, | |
3359 | in which case we should issue a warning that the function | |
3360 | does not return a value. In that case the optimizers will | |
3361 | see that the variable to which the value is assigned was not | |
3362 | initialized. We do not want to issue a warning about that | |
3363 | uninitialized variable. */ | |
3364 | if (DECL_P (modify_dest)) | |
3365 | TREE_NO_WARNING (modify_dest) = 1; | |
726a989a RB |
3366 | |
3367 | if (gimple_call_return_slot_opt_p (stmt)) | |
fa47911c | 3368 | { |
110cfe1c | 3369 | return_slot = modify_dest; |
fa47911c JM |
3370 | modify_dest = NULL; |
3371 | } | |
81bafd36 | 3372 | } |
7740f00d RH |
3373 | else |
3374 | modify_dest = NULL; | |
3375 | ||
1ea193c2 ILT |
3376 | /* If we are inlining a call to the C++ operator new, we don't want |
3377 | to use type based alias analysis on the return value. Otherwise | |
3378 | we may get confused if the compiler sees that the inlined new | |
3379 | function returns a pointer which was just deleted. See bug | |
3380 | 33407. */ | |
3381 | if (DECL_IS_OPERATOR_NEW (fn)) | |
3382 | { | |
3383 | return_slot = NULL; | |
3384 | modify_dest = NULL; | |
3385 | } | |
3386 | ||
d4e4baa9 | 3387 | /* Declare the return variable for the function. */ |
726a989a | 3388 | retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar); |
1ea193c2 ILT |
3389 | |
3390 | if (DECL_IS_OPERATOR_NEW (fn)) | |
3391 | { | |
3392 | gcc_assert (TREE_CODE (retvar) == VAR_DECL | |
3393 | && POINTER_TYPE_P (TREE_TYPE (retvar))); | |
3394 | DECL_NO_TBAA_P (retvar) = 1; | |
3395 | } | |
d4e4baa9 | 3396 | |
acb8f212 | 3397 | /* Add local vars in this inlined callee to caller. */ |
cb91fab0 | 3398 | t_step = id->src_cfun->local_decls; |
acb8f212 JH |
3399 | for (; t_step; t_step = TREE_CHAIN (t_step)) |
3400 | { | |
3401 | var = TREE_VALUE (t_step); | |
3402 | if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var)) | |
eb50f5f4 | 3403 | { |
65401a0b | 3404 | if (var_ann (var) && add_referenced_var (var)) |
eb50f5f4 JH |
3405 | cfun->local_decls = tree_cons (NULL_TREE, var, |
3406 | cfun->local_decls); | |
3407 | } | |
526d73ab JH |
3408 | else if (!can_be_nonlocal (var, id)) |
3409 | cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id), | |
3410 | cfun->local_decls); | |
acb8f212 JH |
3411 | } |
3412 | ||
eb50f5f4 JH |
3413 | /* This is it. Duplicate the callee body. Assume callee is |
3414 | pre-gimplified. Note that we must not alter the caller | |
3415 | function in any way before this point, as this CALL_EXPR may be | |
3416 | a self-referential call; if we're calling ourselves, we need to | |
3417 | duplicate our body before altering anything. */ | |
3418 | copy_body (id, bb->count, bb->frequency, bb, return_block); | |
3419 | ||
d4e4baa9 | 3420 | /* Clean up. */ |
6be42dd4 | 3421 | pointer_map_destroy (id->decl_map); |
d4e4baa9 AO |
3422 | id->decl_map = st; |
3423 | ||
5006671f RG |
3424 | /* Unlink the call's virtual operands before replacing it. */ |
3425 | unlink_stmt_vdef (stmt); | |
3426 | ||
84936f6f | 3427 | /* If the inlined function returns a result that we care about, |
726a989a RB |
3428 | substitute the GIMPLE_CALL with an assignment of the return |
3429 | variable to the LHS of the call. That is, if STMT was | |
3430 | 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */ | |
3431 | if (use_retvar && gimple_call_lhs (stmt)) | |
e21aff8a | 3432 | { |
726a989a RB |
3433 | gimple old_stmt = stmt; |
3434 | stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar); | |
3435 | gsi_replace (&stmt_gsi, stmt, false); | |
110cfe1c | 3436 | if (gimple_in_ssa_p (cfun)) |
5006671f | 3437 | mark_symbols_for_renaming (stmt); |
726a989a | 3438 | maybe_clean_or_replace_eh_stmt (old_stmt, stmt); |
e21aff8a | 3439 | } |
6de9cd9a | 3440 | else |
110cfe1c | 3441 | { |
726a989a RB |
3442 | /* Handle the case of inlining a function with no return |
3443 | statement, which causes the return value to become undefined. */ | |
3444 | if (gimple_call_lhs (stmt) | |
3445 | && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME) | |
110cfe1c | 3446 | { |
726a989a RB |
3447 | tree name = gimple_call_lhs (stmt); |
3448 | tree var = SSA_NAME_VAR (name); | |
110cfe1c JH |
3449 | tree def = gimple_default_def (cfun, var); |
3450 | ||
110cfe1c JH |
3451 | if (def) |
3452 | { | |
726a989a RB |
3453 | /* If the variable is used undefined, make this name |
3454 | undefined via a move. */ | |
3455 | stmt = gimple_build_assign (gimple_call_lhs (stmt), def); | |
3456 | gsi_replace (&stmt_gsi, stmt, true); | |
110cfe1c | 3457 | } |
110cfe1c JH |
3458 | else |
3459 | { | |
726a989a RB |
3460 | /* Otherwise make this variable undefined. */ |
3461 | gsi_remove (&stmt_gsi, true); | |
110cfe1c | 3462 | set_default_def (var, name); |
726a989a | 3463 | SSA_NAME_DEF_STMT (name) = gimple_build_nop (); |
110cfe1c JH |
3464 | } |
3465 | } | |
3466 | else | |
726a989a | 3467 | gsi_remove (&stmt_gsi, true); |
110cfe1c | 3468 | } |
d4e4baa9 | 3469 | |
4f6c2131 | 3470 | if (purge_dead_abnormal_edges) |
726a989a | 3471 | gimple_purge_dead_abnormal_call_edges (return_block); |
84936f6f | 3472 | |
e21aff8a SB |
3473 | /* If the value of the new expression is ignored, that's OK. We |
3474 | don't warn about this for CALL_EXPRs, so we shouldn't warn about | |
3475 | the equivalent inlined version either. */ | |
726a989a RB |
3476 | if (is_gimple_assign (stmt)) |
3477 | { | |
3478 | gcc_assert (gimple_assign_single_p (stmt) | |
1a87cf0c | 3479 | || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))); |
726a989a RB |
3480 | TREE_USED (gimple_assign_rhs1 (stmt)) = 1; |
3481 | } | |
84936f6f | 3482 | |
1eb3331e DB |
3483 | /* Output the inlining info for this abstract function, since it has been |
3484 | inlined. If we don't do this now, we can lose the information about the | |
3485 | variables in the function when the blocks get blown away as soon as we | |
3486 | remove the cgraph node. */ | |
e21aff8a | 3487 | (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl); |
84936f6f | 3488 | |
e72fcfe8 | 3489 | /* Update callgraph if needed. */ |
e21aff8a | 3490 | cgraph_remove_node (cg_edge->callee); |
e72fcfe8 | 3491 | |
e21aff8a | 3492 | id->block = NULL_TREE; |
e21aff8a | 3493 | successfully_inlined = TRUE; |
742a37d5 | 3494 | |
6de9cd9a DN |
3495 | egress: |
3496 | input_location = saved_location; | |
e21aff8a | 3497 | return successfully_inlined; |
d4e4baa9 | 3498 | } |
6de9cd9a | 3499 | |
e21aff8a SB |
3500 | /* Expand call statements found in basic block BB. We can only have |
3501 | CALL_EXPRs as the "toplevel" tree code or nested |
726a989a | 3502 | in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We |
e21aff8a SB |
3503 | unfortunately cannot use that function here because we need a pointer |
3504 | to the CALL_EXPR, not the tree itself. */ |
3505 | ||
3506 | static bool | |
1b369fae | 3507 | gimple_expand_calls_inline (basic_block bb, copy_body_data *id) |
6de9cd9a | 3508 | { |
726a989a | 3509 | gimple_stmt_iterator gsi; |
6de9cd9a | 3510 | |
726a989a | 3511 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
6de9cd9a | 3512 | { |
726a989a | 3513 | gimple stmt = gsi_stmt (gsi); |
e21aff8a | 3514 | |
726a989a RB |
3515 | if (is_gimple_call (stmt) |
3516 | && expand_call_inline (bb, stmt, id)) | |
3517 | return true; | |
6de9cd9a | 3518 | } |
726a989a | 3519 | |
e21aff8a | 3520 | return false; |
6de9cd9a DN |
3521 | } |
3522 | ||
726a989a | 3523 | |
b8a00a4d JH |
3524 | /* Walk all basic blocks created after FIRST and try to fold every statement |
3525 | in the STATEMENTS pointer set. */ | |
726a989a | 3526 | |
b8a00a4d JH |
3527 | static void |
3528 | fold_marked_statements (int first, struct pointer_set_t *statements) | |
3529 | { | |
726a989a | 3530 | for (; first < n_basic_blocks; first++) |
b8a00a4d JH |
3531 | if (BASIC_BLOCK (first)) |
3532 | { | |
726a989a RB |
3533 | gimple_stmt_iterator gsi; |
3534 | ||
3535 | for (gsi = gsi_start_bb (BASIC_BLOCK (first)); | |
3536 | !gsi_end_p (gsi); | |
3537 | gsi_next (&gsi)) | |
3538 | if (pointer_set_contains (statements, gsi_stmt (gsi))) | |
9477eb38 | 3539 | { |
726a989a | 3540 | gimple old_stmt = gsi_stmt (gsi); |
2bafad93 | 3541 | |
726a989a | 3542 | if (fold_stmt (&gsi)) |
9477eb38 | 3543 | { |
726a989a RB |
3544 | /* Re-read the statement from GSI as fold_stmt() may |
3545 | have changed it. */ | |
3546 | gimple new_stmt = gsi_stmt (gsi); | |
3547 | update_stmt (new_stmt); | |
3548 | ||
3549 | if (is_gimple_call (old_stmt)) | |
3550 | cgraph_update_edges_for_call_stmt (old_stmt, new_stmt); | |
3551 | ||
3552 | if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)) | |
3553 | gimple_purge_dead_eh_edges (BASIC_BLOCK (first)); | |
9477eb38 JH |
3554 | } |
3555 | } | |
b8a00a4d JH |
3556 | } |
3557 | } | |
3558 | ||
1084e689 JH |
3559 | /* Return true if BB has at least one abnormal outgoing edge. */ |
3560 | ||
3561 | static inline bool | |
3562 | has_abnormal_outgoing_edge_p (basic_block bb) | |
3563 | { | |
3564 | edge e; | |
3565 | edge_iterator ei; | |
3566 | ||
3567 | FOR_EACH_EDGE (e, ei, bb->succs) | |
3568 | if (e->flags & EDGE_ABNORMAL) | |
3569 | return true; | |
3570 | ||
3571 | return false; | |
3572 | } | |
3573 | ||
d4e4baa9 AO |
3574 | /* Expand calls to inline functions in the body of FN. */ |
3575 | ||
873aa8f5 | 3576 | unsigned int |
46c5ad27 | 3577 | optimize_inline_calls (tree fn) |
d4e4baa9 | 3578 | { |
1b369fae | 3579 | copy_body_data id; |
d4e4baa9 | 3580 | tree prev_fn; |
e21aff8a | 3581 | basic_block bb; |
b8a00a4d | 3582 | int last = n_basic_blocks; |
d406b663 JJ |
3583 | struct gimplify_ctx gctx; |
3584 | ||
c5b6f18e MM |
3585 | /* There is no point in performing inlining if errors have already |
3586 | occurred -- and we might crash if we try to inline invalid | |
3587 | code. */ | |
3588 | if (errorcount || sorrycount) | |
873aa8f5 | 3589 | return 0; |
c5b6f18e | 3590 | |
d4e4baa9 AO |
3591 | /* Clear out ID. */ |
3592 | memset (&id, 0, sizeof (id)); | |
3593 | ||
1b369fae RH |
3594 | id.src_node = id.dst_node = cgraph_node (fn); |
3595 | id.dst_fn = fn; | |
d4e4baa9 AO |
3596 | /* Or any functions that aren't finished yet. */ |
3597 | prev_fn = NULL_TREE; | |
3598 | if (current_function_decl) | |
3599 | { | |
1b369fae | 3600 | id.dst_fn = current_function_decl; |
d4e4baa9 AO |
3601 | prev_fn = current_function_decl; |
3602 | } | |
1b369fae RH |
3603 | |
3604 | id.copy_decl = copy_decl_maybe_to_var; | |
3605 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
3606 | id.transform_new_cfg = false; | |
3607 | id.transform_return_to_modify = true; | |
9ff420f1 | 3608 | id.transform_lang_insert_block = NULL; |
b8a00a4d | 3609 | id.statements_to_fold = pointer_set_create (); |
1b369fae | 3610 | |
d406b663 | 3611 | push_gimplify_context (&gctx); |
d4e4baa9 | 3612 | |
672987e8 ZD |
3613 | /* We make no attempts to keep dominance info up-to-date. */ |
3614 | free_dominance_info (CDI_DOMINATORS); | |
3615 | free_dominance_info (CDI_POST_DOMINATORS); | |
3616 | ||
726a989a RB |
3617 | /* Register specific gimple functions. */ |
3618 | gimple_register_cfg_hooks (); | |
3619 | ||
e21aff8a SB |
3620 | /* Reach the trees by walking over the CFG, and note the |
3621 | enclosing basic-blocks in the call edges. */ | |
3622 | /* We walk the blocks going forward, because inlined function bodies | |
3623 | will split id->current_basic_block, and the new blocks will | |
3624 | follow it; we'll trudge through them, processing their CALL_EXPRs | |
3625 | along the way. */ | |
3626 | FOR_EACH_BB (bb) | |
3627 | gimple_expand_calls_inline (bb, &id); | |
d4e4baa9 | 3628 | |
e21aff8a | 3629 | pop_gimplify_context (NULL); |
6de9cd9a | 3630 | |
18c6ada9 JH |
3631 | #ifdef ENABLE_CHECKING |
3632 | { | |
3633 | struct cgraph_edge *e; | |
3634 | ||
1b369fae | 3635 | verify_cgraph_node (id.dst_node); |
18c6ada9 JH |
3636 | |
3637 | /* Double check that we inlined everything we are supposed to inline. */ | |
1b369fae | 3638 | for (e = id.dst_node->callees; e; e = e->next_callee) |
1e128c5f | 3639 | gcc_assert (e->inline_failed); |
18c6ada9 JH |
3640 | } |
3641 | #endif | |
a9eafe81 AP |
3642 | |
3643 | /* Fold the statements before compacting/renumbering the basic blocks. */ | |
3644 | fold_marked_statements (last, id.statements_to_fold); | |
3645 | pointer_set_destroy (id.statements_to_fold); | |
3646 | ||
3647 | /* Renumber the (code) basic_blocks consecutively. */ | |
3648 | compact_blocks (); | |
3649 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ | |
3650 | number_blocks (fn); | |
b8a00a4d | 3651 | |
873aa8f5 | 3652 | fold_cond_expr_cond (); |
726a989a | 3653 | |
110cfe1c JH |
3654 | /* It would be nice to check SSA/CFG/statement consistency here, but it is |
3655 | not possible yet - the IPA passes might cause various functions not to |
3656 | throw, and they don't care to proactively update local EH info. This is |
3657 | done later in the fixup_cfg pass, which also executes the verification. */ |
726a989a RB |
3658 | return (TODO_update_ssa |
3659 | | TODO_cleanup_cfg | |
45a80bb9 JH |
3660 | | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0) |
3661 | | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0)); | |
d4e4baa9 AO |
3662 | } |
3663 | ||
d4e4baa9 AO |
3664 | /* Passed to walk_tree. Copies the node pointed to, if appropriate. */ |
3665 | ||
3666 | tree | |
46c5ad27 | 3667 | copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
d4e4baa9 AO |
3668 | { |
3669 | enum tree_code code = TREE_CODE (*tp); | |
07beea0d | 3670 | enum tree_code_class cl = TREE_CODE_CLASS (code); |
d4e4baa9 AO |
3671 | |
3672 | /* We make copies of most nodes. */ | |
07beea0d | 3673 | if (IS_EXPR_CODE_CLASS (cl) |
d4e4baa9 AO |
3674 | || code == TREE_LIST |
3675 | || code == TREE_VEC | |
8843c120 DN |
3676 | || code == TYPE_DECL |
3677 | || code == OMP_CLAUSE) | |
d4e4baa9 AO |
3678 | { |
3679 | /* Because the chain gets clobbered when we make a copy, we save it | |
3680 | here. */ | |
82d6e6fc | 3681 | tree chain = NULL_TREE, new_tree; |
07beea0d | 3682 | |
726a989a | 3683 | chain = TREE_CHAIN (*tp); |
d4e4baa9 AO |
3684 | |
3685 | /* Copy the node. */ | |
82d6e6fc | 3686 | new_tree = copy_node (*tp); |
6de9cd9a DN |
3687 | |
3688 | /* Propagate mudflap marked-ness. */ | |
3689 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 3690 | mf_mark (new_tree); |
6de9cd9a | 3691 | |
82d6e6fc | 3692 | *tp = new_tree; |
d4e4baa9 AO |
3693 | |
3694 | /* Now, restore the chain, if appropriate. That will cause | |
3695 | walk_tree to walk into the chain as well. */ | |
50674e96 DN |
3696 | if (code == PARM_DECL |
3697 | || code == TREE_LIST | |
aaf46ef9 | 3698 | || code == OMP_CLAUSE) |
d4e4baa9 AO |
3699 | TREE_CHAIN (*tp) = chain; |
3700 | ||
3701 | /* For now, we don't update BLOCKs when we make copies. So, we | |
6de9cd9a DN |
3702 | have to nullify all BIND_EXPRs. */ |
3703 | if (TREE_CODE (*tp) == BIND_EXPR) | |
3704 | BIND_EXPR_BLOCK (*tp) = NULL_TREE; | |
d4e4baa9 | 3705 | } |
4038c495 GB |
3706 | else if (code == CONSTRUCTOR) |
3707 | { | |
3708 | /* CONSTRUCTOR nodes need special handling because | |
3709 | we need to duplicate the vector of elements. */ | |
82d6e6fc | 3710 | tree new_tree; |
4038c495 | 3711 | |
82d6e6fc | 3712 | new_tree = copy_node (*tp); |
4038c495 GB |
3713 | |
3714 | /* Propagate mudflap marked-ness. */ | |
3715 | if (flag_mudflap && mf_marked_p (*tp)) | |
82d6e6fc | 3716 | mf_mark (new_tree); |
9f63daea | 3717 | |
82d6e6fc | 3718 | CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc, |
4038c495 | 3719 | CONSTRUCTOR_ELTS (*tp)); |
82d6e6fc | 3720 | *tp = new_tree; |
4038c495 | 3721 | } |
6615c446 | 3722 | else if (TREE_CODE_CLASS (code) == tcc_type) |
d4e4baa9 | 3723 | *walk_subtrees = 0; |
6615c446 | 3724 | else if (TREE_CODE_CLASS (code) == tcc_declaration) |
6de9cd9a | 3725 | *walk_subtrees = 0; |
a396f8ae GK |
3726 | else if (TREE_CODE_CLASS (code) == tcc_constant) |
3727 | *walk_subtrees = 0; | |
1e128c5f GB |
3728 | else |
3729 | gcc_assert (code != STATEMENT_LIST); | |
d4e4baa9 AO |
3730 | return NULL_TREE; |
3731 | } | |
3732 | ||
3733 | /* The SAVE_EXPR pointed to by TP is being copied. If ST contains | |
aa4a53af | 3734 | information indicating to what new SAVE_EXPR this one should be mapped, |
e21aff8a SB |
3735 | use that one. Otherwise, create a new node and enter it in ST. FN is |
3736 | the function into which the copy will be placed. */ | |
d4e4baa9 | 3737 | |
892c7e1e | 3738 | static void |
82c82743 | 3739 | remap_save_expr (tree *tp, void *st_, int *walk_subtrees) |
d4e4baa9 | 3740 | { |
6be42dd4 RG |
3741 | struct pointer_map_t *st = (struct pointer_map_t *) st_; |
3742 | tree *n; | |
5e20bdd7 | 3743 | tree t; |
d4e4baa9 AO |
3744 | |
3745 | /* See if we already encountered this SAVE_EXPR. */ | |
6be42dd4 | 3746 | n = (tree *) pointer_map_contains (st, *tp); |
d92b4486 | 3747 | |
d4e4baa9 AO |
3748 | /* If we didn't already remap this SAVE_EXPR, do so now. */ |
3749 | if (!n) | |
3750 | { | |
5e20bdd7 | 3751 | t = copy_node (*tp); |
d4e4baa9 | 3752 | |
d4e4baa9 | 3753 | /* Remember this SAVE_EXPR. */ |
6be42dd4 | 3754 | *pointer_map_insert (st, *tp) = t; |
350ebd54 | 3755 | /* Make sure we don't remap an already-remapped SAVE_EXPR. */ |
6be42dd4 | 3756 | *pointer_map_insert (st, t) = t; |
d4e4baa9 AO |
3757 | } |
3758 | else | |
5e20bdd7 JZ |
3759 | { |
3760 | /* We've already walked into this SAVE_EXPR; don't do it again. */ | |
3761 | *walk_subtrees = 0; | |
6be42dd4 | 3762 | t = *n; |
5e20bdd7 | 3763 | } |
d4e4baa9 AO |
3764 | |
3765 | /* Replace this SAVE_EXPR with the copy. */ | |
5e20bdd7 | 3766 | *tp = t; |
d4e4baa9 | 3767 | } |
d436bff8 | 3768 | |
aa4a53af RK |
3769 | /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label, |
3770 | copies the declaration and enters it in the decl map in DATA (which is |
1b369fae | 3771 | really a `copy_body_data *'). */ |
6de9cd9a DN |
3772 | |
3773 | static tree | |
3774 | mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
3775 | void *data) | |
3776 | { | |
1b369fae | 3777 | copy_body_data *id = (copy_body_data *) data; |
6de9cd9a DN |
3778 | |
3779 | /* Don't walk into types. */ | |
350fae66 RK |
3780 | if (TYPE_P (*tp)) |
3781 | *walk_subtrees = 0; | |
6de9cd9a | 3782 | |
350fae66 | 3783 | else if (TREE_CODE (*tp) == LABEL_EXPR) |
6de9cd9a | 3784 | { |
350fae66 | 3785 | tree decl = TREE_OPERAND (*tp, 0); |
6de9cd9a | 3786 | |
350fae66 | 3787 | /* Copy the decl and remember the copy. */ |
1b369fae | 3788 | insert_decl_map (id, decl, id->copy_decl (decl, id)); |
6de9cd9a DN |
3789 | } |
3790 | ||
3791 | return NULL_TREE; | |
3792 | } | |
3793 | ||
19114537 EC |
3794 | /* Perform any modifications to EXPR required when it is unsaved. Does |
3795 | not recurse into EXPR's subtrees. */ | |
3796 | ||
3797 | static void | |
3798 | unsave_expr_1 (tree expr) | |
3799 | { | |
3800 | switch (TREE_CODE (expr)) | |
3801 | { | |
3802 | case TARGET_EXPR: | |
3803 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
3804 | It's OK for this to happen if it was part of a subtree that | |
3805 | isn't immediately expanded, such as operand 2 of another | |
3806 | TARGET_EXPR. */ | |
3807 | if (TREE_OPERAND (expr, 1)) | |
3808 | break; | |
3809 | ||
3810 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
3811 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
3812 | break; | |
3813 | ||
3814 | default: | |
3815 | break; | |
3816 | } | |
3817 | } | |
3818 | ||
6de9cd9a DN |
3819 | /* Called via walk_tree when an expression is unsaved. Using the |
3820 | splay_tree pointed to by ST (which is really a `splay_tree'), | |
3821 | remaps all local declarations to appropriate replacements. */ | |
d436bff8 AH |
3822 | |
3823 | static tree | |
6de9cd9a | 3824 | unsave_r (tree *tp, int *walk_subtrees, void *data) |
d436bff8 | 3825 | { |
1b369fae | 3826 | copy_body_data *id = (copy_body_data *) data; |
6be42dd4 RG |
3827 | struct pointer_map_t *st = id->decl_map; |
3828 | tree *n; | |
6de9cd9a DN |
3829 | |
3830 | /* Only a local declaration (variable or label). */ | |
3831 | if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp)) | |
3832 | || TREE_CODE (*tp) == LABEL_DECL) | |
3833 | { | |
3834 | /* Lookup the declaration. */ | |
6be42dd4 | 3835 | n = (tree *) pointer_map_contains (st, *tp); |
9f63daea | 3836 | |
6de9cd9a DN |
3837 | /* If it's there, remap it. */ |
3838 | if (n) | |
6be42dd4 | 3839 | *tp = *n; |
6de9cd9a | 3840 | } |
aa4a53af | 3841 | |
6de9cd9a | 3842 | else if (TREE_CODE (*tp) == STATEMENT_LIST) |
726a989a | 3843 | gcc_unreachable (); |
6de9cd9a DN |
3844 | else if (TREE_CODE (*tp) == BIND_EXPR) |
3845 | copy_bind_expr (tp, walk_subtrees, id); | |
3846 | else if (TREE_CODE (*tp) == SAVE_EXPR) | |
82c82743 | 3847 | remap_save_expr (tp, st, walk_subtrees); |
d436bff8 | 3848 | else |
6de9cd9a DN |
3849 | { |
3850 | copy_tree_r (tp, walk_subtrees, NULL); | |
3851 | ||
3852 | /* Do whatever unsaving is required. */ | |
3853 | unsave_expr_1 (*tp); | |
3854 | } | |
3855 | ||
3856 | /* Keep iterating. */ | |
3857 | return NULL_TREE; | |
d436bff8 AH |
3858 | } |
3859 | ||
19114537 EC |
3860 | /* Copies everything in EXPR and replaces variables, labels |
3861 | and SAVE_EXPRs local to EXPR. */ | |
6de9cd9a DN |
3862 | |
3863 | tree | |
19114537 | 3864 | unsave_expr_now (tree expr) |
6de9cd9a | 3865 | { |
1b369fae | 3866 | copy_body_data id; |
6de9cd9a DN |
3867 | |
3868 | /* There's nothing to do for NULL_TREE. */ | |
3869 | if (expr == 0) | |
3870 | return expr; | |
3871 | ||
3872 | /* Set up ID. */ | |
3873 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
3874 | id.src_fn = current_function_decl; |
3875 | id.dst_fn = current_function_decl; | |
6be42dd4 | 3876 | id.decl_map = pointer_map_create (); |
6de9cd9a | 3877 | |
1b369fae RH |
3878 | id.copy_decl = copy_decl_no_change; |
3879 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
3880 | id.transform_new_cfg = false; | |
3881 | id.transform_return_to_modify = false; | |
9ff420f1 | 3882 | id.transform_lang_insert_block = NULL; |
1b369fae | 3883 | |
6de9cd9a DN |
3884 | /* Walk the tree once to find local labels. */ |
3885 | walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id); | |
3886 | ||
3887 | /* Walk the tree again, copying, remapping, and unsaving. */ | |
3888 | walk_tree (&expr, unsave_r, &id, NULL); | |
3889 | ||
3890 | /* Clean up. */ | |
6be42dd4 | 3891 | pointer_map_destroy (id.decl_map); |
6de9cd9a DN |
3892 | |
3893 | return expr; | |
3894 | } | |
3895 | ||
726a989a RB |
3896 | /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local |
3897 | label, copies the declaration and enters it in the splay_tree in DATA (which | |
3898 | is really a 'copy_body_data *'). */ |
3899 | ||
3900 | static tree | |
3901 | mark_local_labels_stmt (gimple_stmt_iterator *gsip, | |
3902 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
3903 | struct walk_stmt_info *wi) | |
3904 | { | |
3905 | copy_body_data *id = (copy_body_data *) wi->info; | |
3906 | gimple stmt = gsi_stmt (*gsip); | |
3907 | ||
3908 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
3909 | { | |
3910 | tree decl = gimple_label_label (stmt); | |
3911 | ||
3912 | /* Copy the decl and remember the copy. */ | |
3913 | insert_decl_map (id, decl, id->copy_decl (decl, id)); | |
3914 | } | |
3915 | ||
3916 | return NULL_TREE; | |
3917 | } | |
3918 | ||
3919 | ||
3920 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local. | |
3921 | Using the decl map pointed to by ST (which is really a `struct pointer_map_t'), |
3922 | remaps all local declarations to appropriate replacements in gimple | |
3923 | operands. */ | |
3924 | ||
3925 | static tree | |
3926 | replace_locals_op (tree *tp, int *walk_subtrees, void *data) | |
3927 | { | |
3928 | struct walk_stmt_info *wi = (struct walk_stmt_info*) data; | |
3929 | copy_body_data *id = (copy_body_data *) wi->info; | |
3930 | struct pointer_map_t *st = id->decl_map; | |
3931 | tree *n; | |
3932 | tree expr = *tp; | |
3933 | ||
3934 | /* Only a local declaration (variable or label). */ | |
3935 | if ((TREE_CODE (expr) == VAR_DECL | |
3936 | && !TREE_STATIC (expr)) | |
3937 | || TREE_CODE (expr) == LABEL_DECL) | |
3938 | { | |
3939 | /* Lookup the declaration. */ | |
3940 | n = (tree *) pointer_map_contains (st, expr); | |
3941 | ||
3942 | /* If it's there, remap it. */ | |
3943 | if (n) | |
3944 | *tp = *n; | |
3945 | *walk_subtrees = 0; | |
3946 | } | |
3947 | else if (TREE_CODE (expr) == STATEMENT_LIST | |
3948 | || TREE_CODE (expr) == BIND_EXPR | |
3949 | || TREE_CODE (expr) == SAVE_EXPR) | |
3950 | gcc_unreachable (); | |
3951 | else if (TREE_CODE (expr) == TARGET_EXPR) | |
3952 | { | |
3953 | /* Don't mess with a TARGET_EXPR that hasn't been expanded. | |
3954 | It's OK for this to happen if it was part of a subtree that | |
3955 | isn't immediately expanded, such as operand 2 of another | |
3956 | TARGET_EXPR. */ | |
3957 | if (!TREE_OPERAND (expr, 1)) | |
3958 | { | |
3959 | TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); | |
3960 | TREE_OPERAND (expr, 3) = NULL_TREE; | |
3961 | } | |
3962 | } | |
3963 | ||
3964 | /* Keep iterating. */ | |
3965 | return NULL_TREE; | |
3966 | } | |
3967 | ||
3968 | ||
3969 | /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local. | |
3970 | Using the decl map pointed to by ST (which is really a `struct pointer_map_t'), |
3971 | remaps all local declarations to appropriate replacements in gimple | |
3972 | statements. */ | |
3973 | ||
3974 | static tree | |
3975 | replace_locals_stmt (gimple_stmt_iterator *gsip, | |
3976 | bool *handled_ops_p ATTRIBUTE_UNUSED, | |
3977 | struct walk_stmt_info *wi) | |
3978 | { | |
3979 | copy_body_data *id = (copy_body_data *) wi->info; | |
3980 | gimple stmt = gsi_stmt (*gsip); | |
3981 | ||
3982 | if (gimple_code (stmt) == GIMPLE_BIND) | |
3983 | { | |
3984 | tree block = gimple_bind_block (stmt); | |
3985 | ||
3986 | if (block) | |
3987 | { | |
3988 | remap_block (&block, id); | |
3989 | gimple_bind_set_block (stmt, block); | |
3990 | } | |
3991 | ||
3992 | /* This will remap a lot of the same decls again, but this should be | |
3993 | harmless. */ | |
3994 | if (gimple_bind_vars (stmt)) | |
526d73ab | 3995 | gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id)); |
726a989a RB |
3996 | } |
3997 | ||
3998 | /* Keep iterating. */ | |
3999 | return NULL_TREE; | |
4000 | } | |
4001 | ||
4002 | ||
4003 | /* Copies everything in SEQ and replaces variables and labels local to | |
4004 | current_function_decl. */ | |
4005 | ||
4006 | gimple_seq | |
4007 | copy_gimple_seq_and_replace_locals (gimple_seq seq) | |
4008 | { | |
4009 | copy_body_data id; | |
4010 | struct walk_stmt_info wi; | |
4011 | struct pointer_set_t *visited; | |
4012 | gimple_seq copy; | |
4013 | ||
4014 | /* There's nothing to do for NULL_TREE. */ | |
4015 | if (seq == NULL) | |
4016 | return seq; | |
4017 | ||
4018 | /* Set up ID. */ | |
4019 | memset (&id, 0, sizeof (id)); | |
4020 | id.src_fn = current_function_decl; | |
4021 | id.dst_fn = current_function_decl; | |
4022 | id.decl_map = pointer_map_create (); | |
4023 | ||
4024 | id.copy_decl = copy_decl_no_change; | |
4025 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; | |
4026 | id.transform_new_cfg = false; | |
4027 | id.transform_return_to_modify = false; | |
4028 | id.transform_lang_insert_block = NULL; | |
4029 | ||
4030 | /* Walk the tree once to find local labels. */ | |
4031 | memset (&wi, 0, sizeof (wi)); | |
4032 | visited = pointer_set_create (); | |
4033 | wi.info = &id; | |
4034 | wi.pset = visited; | |
4035 | walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi); | |
4036 | pointer_set_destroy (visited); | |
4037 | ||
4038 | copy = gimple_seq_copy (seq); | |
4039 | ||
4040 | /* Walk the copy, remapping decls. */ | |
4041 | memset (&wi, 0, sizeof (wi)); | |
4042 | wi.info = &id; | |
4043 | walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi); | |
4044 | ||
4045 | /* Clean up. */ | |
4046 | pointer_map_destroy (id.decl_map); | |
4047 | ||
4048 | return copy; | |
4049 | } | |
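/* A typical use (hypothetical): duplicating a cleanup sequence so that
   each copy gets private labels and local temporaries:

     gimple_seq cleanup2 = copy_gimple_seq_and_replace_locals (cleanup);

   Without the remapping, both sequences would share the same LABEL_DECLs,
   and labels must be unique within a function.  */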
4050 | ||
4051 | ||
6de9cd9a | 4052 | /* Allow someone to determine if SEARCH is a child of TOP from gdb. */ |
aa4a53af | 4053 | |
6de9cd9a DN |
4054 | static tree |
4055 | debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data) | |
4056 | { | |
4057 | if (*tp == data) | |
4058 | return (tree) data; | |
4059 | else | |
4060 | return NULL; | |
4061 | } | |
4062 | ||
6de9cd9a DN |
4063 | bool |
4064 | debug_find_tree (tree top, tree search) | |
4065 | { | |
4066 | return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0; | |
4067 | } | |
4068 | ||
e21aff8a | 4069 | |
6de9cd9a DN |
4070 | /* Declare the variables created by the inliner. Add all the variables in |
4071 | VARS to BLOCK. */ |
4072 | ||
4073 | static void | |
e21aff8a | 4074 | declare_inline_vars (tree block, tree vars) |
6de9cd9a | 4075 | { |
84936f6f RH |
4076 | tree t; |
4077 | for (t = vars; t; t = TREE_CHAIN (t)) | |
9659ce8b JH |
4078 | { |
4079 | DECL_SEEN_IN_BIND_EXPR_P (t) = 1; | |
4080 | gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t)); | |
cb91fab0 | 4081 | cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls); |
9659ce8b | 4082 | } |
6de9cd9a | 4083 | |
e21aff8a SB |
4084 | if (block) |
4085 | BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars); | |
4086 | } | |
4087 | ||
19734dd8 | 4088 | /* Finish up the copy COPY of DECL. The DECL originally was in ID's source |
1b369fae RH |
4089 | function, but the copy will live in ID's destination function; fix up |
4090 | its debug info, RTL, TREE_USED flag and context accordingly. */ |
19734dd8 | 4091 | |
1b369fae RH |
4092 | static tree |
4093 | copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy) | |
19734dd8 | 4094 | { |
19734dd8 RL |
4095 | /* Don't generate debug information for the copy if we wouldn't have |
4096 | generated it for the original.  */ |
4097 | DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl); | |
4098 | DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl); | |
4099 | ||
4100 | /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what | |
4101 | declaration inspired this copy. */ | |
4102 | DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl); | |
4103 | ||
4104 | /* The new variable/label has no RTL, yet. */ | |
68a976f2 RL |
4105 | if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL) |
4106 | && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy)) | |
19734dd8 RL |
4107 | SET_DECL_RTL (copy, NULL_RTX); |
4108 | ||
4109 | /* These decls would otherwise always appear unused; mark them used. */ | |
4110 | TREE_USED (copy) = 1; | |
4111 | ||
4112 | /* Set the context for the new declaration. */ | |
4113 | if (!DECL_CONTEXT (decl)) | |
4114 | /* Globals stay global. */ | |
4115 | ; | |
1b369fae | 4116 | else if (DECL_CONTEXT (decl) != id->src_fn) |
19734dd8 RL |
4117 | /* Things that weren't in the scope of the function we're inlining |
4118 | from aren't in the scope we're inlining to, either. */ | |
4119 | ; | |
4120 | else if (TREE_STATIC (decl)) | |
4121 | /* Function-scoped static variables should stay in the original | |
4122 | function. */ | |
4123 | ; | |
4124 | else | |
4125 | /* Ordinary automatic local variables are now in the scope of the | |
4126 | new function. */ | |
1b369fae | 4127 | DECL_CONTEXT (copy) = id->dst_fn; |
19734dd8 RL |
4128 | |
4129 | return copy; | |
4130 | } | |
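/* For instance, when versioning F into F_CLONE (names hypothetical):
   an automatic local of F gets DECL_CONTEXT == F_CLONE in its copy; a
   function-scoped "static int counter" keeps F as its context, so
   both versions share the one instance; and a decl with a NULL
   context (a global) is left global.  */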
4131 | ||
1b369fae RH |
4132 | static tree |
4133 | copy_decl_to_var (tree decl, copy_body_data *id) | |
4134 | { | |
4135 | tree copy, type; | |
4136 | ||
4137 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4138 | || TREE_CODE (decl) == RESULT_DECL); | |
4139 | ||
4140 | type = TREE_TYPE (decl); | |
4141 | ||
4142 | copy = build_decl (VAR_DECL, DECL_NAME (decl), type); | |
4143 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); | |
4144 | TREE_READONLY (copy) = TREE_READONLY (decl); | |
4145 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
0890b981 | 4146 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
058dcc25 | 4147 | DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl); |
1b369fae RH |
4148 | |
4149 | return copy_decl_for_dup_finish (id, decl, copy); | |
4150 | } | |
4151 | ||
c08cd4c1 JM |
4152 | /* Like copy_decl_to_var, but create a return slot object instead of a |
4153 | pointer variable for return by invisible reference. */ | |
4154 | ||
4155 | static tree | |
4156 | copy_result_decl_to_var (tree decl, copy_body_data *id) | |
4157 | { | |
4158 | tree copy, type; | |
4159 | ||
4160 | gcc_assert (TREE_CODE (decl) == PARM_DECL | |
4161 | || TREE_CODE (decl) == RESULT_DECL); | |
4162 | ||
4163 | type = TREE_TYPE (decl); | |
4164 | if (DECL_BY_REFERENCE (decl)) | |
4165 | type = TREE_TYPE (type); | |
4166 | ||
4167 | copy = build_decl (VAR_DECL, DECL_NAME (decl), type); | |
4168 | TREE_READONLY (copy) = TREE_READONLY (decl); | |
4169 | TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl); | |
4170 | if (!DECL_BY_REFERENCE (decl)) | |
4171 | { | |
4172 | TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl); | |
0890b981 | 4173 | DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl); |
058dcc25 | 4174 | DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl); |
c08cd4c1 JM |
4175 | } |
4176 | ||
4177 | return copy_decl_for_dup_finish (id, decl, copy); | |
4178 | } | |
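/* Concretely (hypothetical source): for

     struct big f (void);

   returning by invisible reference, the RESULT_DECL has
   DECL_BY_REFERENCE set and type "struct big *"; stripping one
   TREE_TYPE level above gives "struct big", so the variable built
   here is an actual return slot, not a pointer temporary.  */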
4179 | ||
9ff420f1 | 4180 | tree |
1b369fae RH |
4181 | copy_decl_no_change (tree decl, copy_body_data *id) |
4182 | { | |
4183 | tree copy; | |
4184 | ||
4185 | copy = copy_node (decl); | |
4186 | ||
4187 | /* The COPY is not abstract; it will be generated in DST_FN. */ | |
4188 | DECL_ABSTRACT (copy) = 0; | |
4189 | lang_hooks.dup_lang_specific_decl (copy); | |
4190 | ||
4191 | /* TREE_ADDRESSABLE isn't used to indicate that a label's address has | |
4192 | been taken; it's for internal bookkeeping in expand_goto_internal. */ | |
4193 | if (TREE_CODE (copy) == LABEL_DECL) | |
4194 | { | |
4195 | TREE_ADDRESSABLE (copy) = 0; | |
4196 | LABEL_DECL_UID (copy) = -1; | |
4197 | } | |
4198 | ||
4199 | return copy_decl_for_dup_finish (id, decl, copy); | |
4200 | } | |
4201 | ||
4202 | static tree | |
4203 | copy_decl_maybe_to_var (tree decl, copy_body_data *id) | |
4204 | { | |
4205 | if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL) | |
4206 | return copy_decl_to_var (decl, id); | |
4207 | else | |
4208 | return copy_decl_no_change (decl, id); | |
4209 | } | |
4210 | ||
19734dd8 RL |
4211 | /* Return a copy of the function's argument tree. */ |
4212 | static tree | |
c6f7cfc1 JH |
4213 | copy_arguments_for_versioning (tree orig_parm, copy_body_data * id, |
4214 | bitmap args_to_skip, tree *vars) | |
19734dd8 | 4215 | { |
c6f7cfc1 JH |
4216 | tree arg, *parg; |
4217 | tree new_parm = NULL; | |
4218 | int i = 0; | |
19734dd8 | 4219 | |
c6f7cfc1 JH |
4220 | parg = &new_parm; |
4221 | ||
4222 | for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++) | |
4223 | if (!args_to_skip || !bitmap_bit_p (args_to_skip, i)) | |
4224 | { | |
4225 | tree new_tree = remap_decl (arg, id); | |
4226 | lang_hooks.dup_lang_specific_decl (new_tree); | |
4227 | *parg = new_tree; | |
4228 | parg = &TREE_CHAIN (new_tree); | |
4229 | } | |
eb50f5f4 | 4230 | else if (!pointer_map_contains (id->decl_map, arg)) |
c6f7cfc1 JH |
4231 | { |
4232 | /* Make an equivalent VAR_DECL.  If the argument is later used | |
4233 | as a temporary variable in the function, its uses will be | |
4234 | replaced by this local variable. */ | |
4235 | tree var = copy_decl_to_var (arg, id); | |
4236 | get_var_ann (var); | |
4237 | add_referenced_var (var); | |
4238 | insert_decl_map (id, arg, var); | |
4239 | /* Declare this new variable. */ | |
4240 | TREE_CHAIN (var) = *vars; | |
4241 | *vars = var; | |
4242 | } | |
4243 | return new_parm; | |
19734dd8 RL |
4244 | } |
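/* Illustrative caller (names hypothetical): to version OLD_FN while
   dropping its second parameter, pass a bitmap of zero-based
   parameter indices to tree_function_versioning (defined below):

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     tree_function_versioning (old_fn, new_fn, NULL, false, skip);
     BITMAP_FREE (skip);

   Uses of a skipped PARM_DECL that survive in the body are rewritten
   to the replacement VAR_DECL created above.  */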
4245 | ||
4246 | /* Return a copy of the function's static chain. */ | |
4247 | static tree | |
1b369fae | 4248 | copy_static_chain (tree static_chain, copy_body_data * id) |
19734dd8 RL |
4249 | { |
4250 | tree *chain_copy, *pvar; | |
4251 | ||
4252 | chain_copy = &static_chain; | |
4253 | for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar)) | |
4254 | { | |
82d6e6fc KG |
4255 | tree new_tree = remap_decl (*pvar, id); |
4256 | lang_hooks.dup_lang_specific_decl (new_tree); | |
4257 | TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar); | |
4258 | *pvar = new_tree; | |
19734dd8 RL |
4259 | } |
4260 | return static_chain; | |
4261 | } | |
4262 | ||
4263 | /* Return true if the function is allowed to be versioned. | |
4264 | This is a guard for the versioning functionality. */ | |
4265 | bool | |
4266 | tree_versionable_function_p (tree fndecl) | |
4267 | { | |
4268 | if (fndecl == NULL_TREE) | |
4269 | return false; | |
4270 | /* ??? There are cases where a function is | |
4271 | uninlinable but can be versioned. */ | |
4272 | if (!tree_inlinable_function_p (fndecl)) | |
4273 | return false; | |
4274 | ||
4275 | return true; | |
4276 | } | |
4277 | ||
9f5e9983 JJ |
4278 | /* Create a new name for a clone of DECL.  Returns an identifier. */ | |
4279 | ||
4280 | static GTY(()) unsigned int clone_fn_id_num; | |
4281 | ||
4282 | static tree | |
4283 | clone_function_name (tree decl) | |
4284 | { | |
4285 | tree name = DECL_ASSEMBLER_NAME (decl); | |
4286 | size_t len = IDENTIFIER_LENGTH (name); | |
4287 | char *tmp_name, *prefix; | |
4288 | ||
4289 | prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1); | |
4290 | memcpy (prefix, IDENTIFIER_POINTER (name), len); | |
4291 | strcpy (prefix + len, "_clone"); | |
4292 | #ifndef NO_DOT_IN_LABEL | |
4293 | prefix[len] = '.'; | |
4294 | #elif !defined NO_DOLLAR_IN_LABEL | |
4295 | prefix[len] = '$'; | |
4296 | #endif | |
4297 | ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++); | |
4298 | return get_identifier (tmp_name); | |
4299 | } | |
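/* For example, if DECL's assembler name is "foo" and the target
   allows dots in labels, successive calls yield identifiers such as
   "foo.clone.0", "foo.clone.1", and so on; the exact separator and
   numbering come from ASM_FORMAT_PRIVATE_NAME.  */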
4300 | ||
19734dd8 RL |
4301 | /* Create a copy of a function's tree. |
4302 | OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes | |
4303 | of the original function and the new copied function | |
4304 | respectively. In case we want to replace a DECL | |
4305 | tree with another tree while duplicating the function's | |
4306 | body, TREE_MAP represents the mapping between these | |
ea99e0be JH |
4307 | trees.  If UPDATE_CLONES is set, the call_stmt fields of edges of | |
4308 | clones will be updated; ARGS_TO_SKIP, if non-NULL, is a bitmap of parameter indices to omit. */ |
19734dd8 | 4309 | void |
ea99e0be | 4310 | tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map, |
c6f7cfc1 | 4311 | bool update_clones, bitmap args_to_skip) |
19734dd8 RL |
4312 | { |
4313 | struct cgraph_node *old_version_node; | |
4314 | struct cgraph_node *new_version_node; | |
1b369fae | 4315 | copy_body_data id; |
110cfe1c | 4316 | tree p; |
19734dd8 RL |
4317 | unsigned i; |
4318 | struct ipa_replace_map *replace_info; | |
4319 | basic_block old_entry_block; | |
0f1961a2 JH |
4320 | VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10); |
4321 | ||
19734dd8 | 4322 | tree t_step; |
873aa8f5 | 4323 | tree old_current_function_decl = current_function_decl; |
0f1961a2 | 4324 | tree vars = NULL_TREE; |
19734dd8 RL |
4325 | |
4326 | gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL | |
4327 | && TREE_CODE (new_decl) == FUNCTION_DECL); | |
4328 | DECL_POSSIBLY_INLINED (old_decl) = 1; | |
4329 | ||
4330 | old_version_node = cgraph_node (old_decl); | |
4331 | new_version_node = cgraph_node (new_decl); | |
4332 | ||
a3aadcc5 JH |
4333 | /* Output the inlining info for this abstract function, since it has been |
4334 | inlined. If we don't do this now, we can lose the information about the | |
4335 | variables in the function when the blocks get blown away as soon as we | |
4336 | remove the cgraph node. */ | |
4337 | (*debug_hooks->outlining_inline_function) (old_decl); | |
4338 | ||
19734dd8 RL |
4339 | DECL_ARTIFICIAL (new_decl) = 1; |
4340 | DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl); | |
4341 | ||
3d283195 JH |
4342 | /* Prepare the data structures for the tree copy. */ |
4343 | memset (&id, 0, sizeof (id)); | |
4344 | ||
19734dd8 | 4345 | /* Generate a new name for the new version. */ |
ea99e0be | 4346 | if (!update_clones) |
19734dd8 | 4347 | { |
9f5e9983 | 4348 | DECL_NAME (new_decl) = clone_function_name (old_decl); |
95c8e172 RL |
4349 | SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl)); |
4350 | SET_DECL_RTL (new_decl, NULL_RTX); | |
3d283195 | 4351 | id.statements_to_fold = pointer_set_create (); |
19734dd8 | 4352 | } |
19734dd8 | 4353 | |
6be42dd4 | 4354 | id.decl_map = pointer_map_create (); |
1b369fae RH |
4355 | id.src_fn = old_decl; |
4356 | id.dst_fn = new_decl; | |
4357 | id.src_node = old_version_node; | |
4358 | id.dst_node = new_version_node; | |
4359 | id.src_cfun = DECL_STRUCT_FUNCTION (old_decl); | |
19734dd8 | 4360 | |
1b369fae RH |
4361 | id.copy_decl = copy_decl_no_change; |
4362 | id.transform_call_graph_edges | |
4363 | = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE; | |
4364 | id.transform_new_cfg = true; | |
4365 | id.transform_return_to_modify = false; | |
9ff420f1 | 4366 | id.transform_lang_insert_block = NULL; |
1b369fae | 4367 | |
19734dd8 | 4368 | current_function_decl = new_decl; |
110cfe1c JH |
4369 | old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION |
4370 | (DECL_STRUCT_FUNCTION (old_decl)); | |
4371 | initialize_cfun (new_decl, old_decl, | |
4372 | old_entry_block->count, | |
4373 | old_entry_block->frequency); | |
4374 | push_cfun (DECL_STRUCT_FUNCTION (new_decl)); | |
19734dd8 RL |
4375 | |
4376 | /* Copy the function's static chain. */ | |
4377 | p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl; | |
4378 | if (p) | |
4379 | DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl = | |
4380 | copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl, | |
4381 | &id); | |
0f1961a2 | 4382 | |
19734dd8 RL |
4383 | /* If there's a tree_map, prepare for substitution. */ |
4384 | if (tree_map) | |
4385 | for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++) | |
4386 | { | |
0f1961a2 | 4387 | gimple init; |
726a989a RB |
4388 | replace_info |
4389 | = (struct ipa_replace_map *) VARRAY_GENERIC_PTR (tree_map, i); | |
1b369fae | 4390 | if (replace_info->replace_p) |
00fc2333 | 4391 | { |
657c0925 JH |
4392 | tree op = replace_info->new_tree; |
4393 | ||
4394 | STRIP_NOPS (op); | |
4395 | ||
4396 | if (TREE_CODE (op) == VIEW_CONVERT_EXPR) | |
4397 | op = TREE_OPERAND (op, 0); | |
4398 | ||
4399 | if (TREE_CODE (op) == ADDR_EXPR) | |
00fc2333 | 4400 | { |
657c0925 | 4401 | op = TREE_OPERAND (op, 0); |
00fc2333 JH |
4402 | while (handled_component_p (op)) |
4403 | op = TREE_OPERAND (op, 0); | |
4404 | if (TREE_CODE (op) == VAR_DECL) | |
4405 | add_referenced_var (op); | |
4406 | } | |
0f1961a2 JH |
4407 | gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL); |
4408 | init = setup_one_parameter (&id, replace_info->old_tree, | |
4409 | replace_info->new_tree, id.src_fn, | |
4410 | NULL, | |
4411 | &vars); | |
4412 | if (init) | |
4413 | VEC_safe_push (gimple, heap, init_stmts, init); | |
00fc2333 | 4414 | } |
19734dd8 | 4415 | } |
eb50f5f4 JH |
4416 | /* Copy the function's arguments. */ |
4417 | if (DECL_ARGUMENTS (old_decl) != NULL_TREE) | |
4418 | DECL_ARGUMENTS (new_decl) = | |
4419 | copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id, | |
4420 | args_to_skip, &vars); | |
4421 | ||
4422 | DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id); | |
4423 | ||
4424 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ | |
4425 | number_blocks (id.dst_fn); | |
19734dd8 | 4426 | |
0f1961a2 | 4427 | declare_inline_vars (DECL_INITIAL (new_decl), vars); |
cb91fab0 | 4428 | if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE) |
19734dd8 | 4429 | /* Add local vars. */ |
cb91fab0 | 4430 | for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls; |
19734dd8 RL |
4431 | t_step; t_step = TREE_CHAIN (t_step)) |
4432 | { | |
4433 | tree var = TREE_VALUE (t_step); | |
4434 | if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var)) | |
cb91fab0 | 4435 | cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls); |
526d73ab | 4436 | else if (!can_be_nonlocal (var, &id)) |
cb91fab0 | 4437 | cfun->local_decls = |
19734dd8 | 4438 | tree_cons (NULL_TREE, remap_decl (var, &id), |
cb91fab0 | 4439 | cfun->local_decls); |
19734dd8 RL |
4440 | } |
4441 | ||
4442 | /* Copy the Function's body. */ | |
110cfe1c | 4443 | copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR); |
19734dd8 | 4444 | |
19734dd8 RL |
4445 | if (DECL_RESULT (old_decl) != NULL_TREE) |
4446 | { | |
4447 | tree *res_decl = &DECL_RESULT (old_decl); | |
4448 | DECL_RESULT (new_decl) = remap_decl (*res_decl, &id); | |
4449 | lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl)); | |
4450 | } | |
4451 | ||
19734dd8 RL |
4452 | /* Renumber the lexical scoping (non-code) blocks consecutively. */ |
4453 | number_blocks (new_decl); | |
4454 | ||
0f1961a2 JH |
4455 | if (VEC_length (gimple, init_stmts)) |
4456 | { | |
4457 | basic_block bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR)); | |
4458 | while (VEC_length (gimple, init_stmts)) | |
4459 | insert_init_stmt (bb, VEC_pop (gimple, init_stmts)); | |
4460 | } | |
4461 | ||
19734dd8 | 4462 | /* Clean up. */ |
6be42dd4 | 4463 | pointer_map_destroy (id.decl_map); |
5006671f RG |
4464 | free_dominance_info (CDI_DOMINATORS); |
4465 | free_dominance_info (CDI_POST_DOMINATORS); | |
3d283195 JH |
4466 | if (!update_clones) |
4467 | { | |
4468 | fold_marked_statements (0, id.statements_to_fold); | |
4469 | pointer_set_destroy (id.statements_to_fold); | |
4470 | fold_cond_expr_cond (); | |
5006671f | 4471 | delete_unreachable_blocks (); |
110cfe1c JH |
4472 | update_ssa (TODO_update_ssa); |
4473 | } | |
0f1961a2 | 4474 | VEC_free (gimple, heap, init_stmts); |
110cfe1c | 4475 | pop_cfun (); |
873aa8f5 JH |
4476 | current_function_decl = old_current_function_decl; |
4477 | gcc_assert (!current_function_decl | |
4478 | || DECL_STRUCT_FUNCTION (current_function_decl) == cfun); | |
19734dd8 RL |
4479 | return; |
4480 | } | |
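/* Sketch of building TREE_MAP for the function above (values
   hypothetical); struct ipa_replace_map is declared in ipa-prop.h:

     struct ipa_replace_map *map = XCNEW (struct ipa_replace_map);
     map->old_tree = parm;
     map->new_tree = build_int_cst (TREE_TYPE (parm), 42);
     map->replace_p = true;
     VARRAY_PUSH_GENERIC_PTR (tree_map, map);

   For each such entry, setup_one_parameter queues an initialization
   statement in INIT_STMTS; those are inserted on the edge leaving
   ENTRY_BLOCK_PTR once the body has been copied.  */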
4481 | ||
52dd234b RH |
4482 | /* Duplicate a type, fields and all. */ |
4483 | ||
4484 | tree | |
4485 | build_duplicate_type (tree type) | |
4486 | { | |
1b369fae | 4487 | struct copy_body_data id; |
52dd234b RH |
4488 | |
4489 | memset (&id, 0, sizeof (id)); | |
1b369fae RH |
4490 | id.src_fn = current_function_decl; |
4491 | id.dst_fn = current_function_decl; | |
4492 | id.src_cfun = cfun; | |
6be42dd4 | 4493 | id.decl_map = pointer_map_create (); |
4009f2e7 | 4494 | id.copy_decl = copy_decl_no_change; |
52dd234b RH |
4495 | |
4496 | type = remap_type_1 (type, &id); | |
4497 | ||
6be42dd4 | 4498 | pointer_map_destroy (id.decl_map); |
52dd234b | 4499 | |
f31c9f09 DG |
4500 | TYPE_CANONICAL (type) = type; |
4501 | ||
52dd234b RH |
4502 | return type; |
4503 | } | |
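/* A caller sketch (hypothetical): obtain a variant of a type whose
   fields or flags can be adjusted without touching the original:

     tree dup = build_duplicate_type (orig_type);

   Because the duplicate is made its own TYPE_CANONICAL, canonical-type
   comparisons treat it as distinct from ORIG_TYPE.  */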
ab442df7 MM |
4504 | |
4505 | /* Return true if it is safe to inline CALLEE into CALLER, even though | |
4506 | they were compiled with different target or optimization options. */ |
4507 | bool | |
4508 | tree_can_inline_p (tree caller, tree callee) | |
4509 | { | |
5779e713 MM |
4510 | #if 0 |
4511 | /* This causes a regression in SPEC in that it prevents a cold function from | |
4512 | inlining a hot function. Perhaps this should only apply to functions | |
4513 | that the user declares hot/cold/optimize explicitly. */ | |
4514 | ||
ab442df7 MM |
4515 | /* Don't inline a function with a higher optimization level than the |
4516 | caller, or with different space constraints (hot/cold functions). */ | |
4517 | tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller); | |
4518 | tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee); | |
4519 | ||
4520 | if (caller_tree != callee_tree) | |
4521 | { | |
4522 | struct cl_optimization *caller_opt | |
4523 | = TREE_OPTIMIZATION ((caller_tree) | |
4524 | ? caller_tree | |
4525 | : optimization_default_node); | |
4526 | ||
4527 | struct cl_optimization *callee_opt | |
4528 | = TREE_OPTIMIZATION ((callee_tree) | |
4529 | ? callee_tree | |
4530 | : optimization_default_node); | |
4531 | ||
4532 | if ((caller_opt->optimize > callee_opt->optimize) | |
4533 | || (caller_opt->optimize_size != callee_opt->optimize_size)) | |
4534 | return false; | |
4535 | } | |
5779e713 | 4536 | #endif |
ab442df7 MM |
4537 | |
4538 | /* Allow the backend to decide if inlining is ok. */ | |
4539 | return targetm.target_option.can_inline_p (caller, callee); | |
4540 | } | |
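/* For instance, the i386 implementation of this hook refuses to
   inline a callee compiled with ISA options (say, -msse4) that the
   caller does not also enable, since the inlined code could then
   execute instructions the caller's context never promised.  */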
9f5e9983 JJ |
4541 | |
4542 | #include "gt-tree-inline.h" |