]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* Miscellaneous SSA utility functions. |
d353bf18 | 2 | Copyright (C) 2001-2015 Free Software Foundation, Inc. |
4ee9c684 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 8 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
9ef16211 | 23 | #include "backend.h" |
4ee9c684 | 24 | #include "tree.h" |
9ef16211 | 25 | #include "gimple.h" |
26 | #include "hard-reg-set.h" | |
27 | #include "ssa.h" | |
28 | #include "alias.h" | |
b20a8bb4 | 29 | #include "fold-const.h" |
9ed99284 | 30 | #include "stor-layout.h" |
4ee9c684 | 31 | #include "flags.h" |
4ee9c684 | 32 | #include "tm_p.h" |
9fa67218 | 33 | #include "target.h" |
4ee9c684 | 34 | #include "langhooks.h" |
ce084dfc | 35 | #include "gimple-pretty-print.h" |
bc61cadb | 36 | #include "internal-fn.h" |
37 | #include "gimple-fold.h" | |
a8783bee | 38 | #include "gimplify.h" |
dcf1a1ec | 39 | #include "gimple-iterator.h" |
40 | #include "gimple-walk.h" | |
05d9c18a | 41 | #include "tree-ssa-loop-manip.h" |
073c1fd5 | 42 | #include "tree-into-ssa.h" |
43 | #include "tree-ssa.h" | |
4ee9c684 | 44 | #include "tree-inline.h" |
4ee9c684 | 45 | #include "tree-pass.h" |
0b205f4c | 46 | #include "diagnostic-core.h" |
0ab72b53 | 47 | #include "cfgloop.h" |
e797f49f | 48 | #include "cfgexpand.h" |
4ee9c684 | 49 | |
/* Pointer map of variable mappings, keyed by edge.  Lazily allocated
   on first use by redirect_edge_var_map_add; released (and reset to
   NULL) by redirect_edge_var_map_destroy.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
d03ba86f | 52 | |
53 | ||
54 | /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */ | |
55 | ||
56 | void | |
60d535d2 | 57 | redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus) |
d03ba86f | 58 | { |
d03ba86f | 59 | edge_var_map new_node; |
60 | ||
61 | if (edge_var_maps == NULL) | |
06ecf488 | 62 | edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >; |
d03ba86f | 63 | |
06ecf488 | 64 | auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e); |
d03ba86f | 65 | new_node.def = def; |
66 | new_node.result = result; | |
efbcb6de | 67 | new_node.locus = locus; |
d03ba86f | 68 | |
06ecf488 | 69 | slot.safe_push (new_node); |
d03ba86f | 70 | } |
71 | ||
72 | ||
73 | /* Clear the var mappings in edge E. */ | |
74 | ||
75 | void | |
76 | redirect_edge_var_map_clear (edge e) | |
77 | { | |
d03ba86f | 78 | if (!edge_var_maps) |
79 | return; | |
80 | ||
06ecf488 | 81 | auto_vec<edge_var_map> *head = edge_var_maps->get (e); |
d03ba86f | 82 | |
06ecf488 | 83 | if (head) |
84 | head->release (); | |
d03ba86f | 85 | } |
86 | ||
87 | ||
/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.
   */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  /* Insert NEWE's slot before looking up OLDE: get_or_insert may grow
     the table, which would presumably invalidate a pointer obtained
     from an earlier lookup -- TODO confirm against hash_map's
     invalidation guarantees before reordering these calls.  */
  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  /* Append rather than replace: NEWE may already have mappings.  */
  new_head->safe_splice (*old_head);
}
107 | ||
108 | ||
f0b5f617 | 109 | /* Return the variable mappings for a given edge. If there is none, return |
d03ba86f | 110 | NULL. */ |
111 | ||
06ecf488 | 112 | vec<edge_var_map> * |
d03ba86f | 113 | redirect_edge_var_map_vector (edge e) |
114 | { | |
d03ba86f | 115 | /* Hey, what kind of idiot would... you'd be surprised. */ |
116 | if (!edge_var_maps) | |
117 | return NULL; | |
118 | ||
06ecf488 | 119 | auto_vec<edge_var_map> *slot = edge_var_maps->get (e); |
d03ba86f | 120 | if (!slot) |
121 | return NULL; | |
122 | ||
06ecf488 | 123 | return slot; |
182db9cf | 124 | } |
d03ba86f | 125 | |
126 | /* Clear the edge variable mappings. */ | |
127 | ||
128 | void | |
129 | redirect_edge_var_map_destroy (void) | |
130 | { | |
06ecf488 | 131 | delete edge_var_maps; |
132 | edge_var_maps = NULL; | |
d03ba86f | 133 | } |
134 | ||
135 | ||
a61b7bc4 | 136 | /* Remove the corresponding arguments from the PHI nodes in E's |
137 | destination block and redirect it to DEST. Return redirected edge. | |
d03ba86f | 138 | The list of removed arguments is stored in a vector accessed |
139 | through edge_var_maps. */ | |
4ee9c684 | 140 | |
141 | edge | |
142 | ssa_redirect_edge (edge e, basic_block dest) | |
143 | { | |
1a91d914 | 144 | gphi_iterator gsi; |
145 | gphi *phi; | |
d03ba86f | 146 | |
147 | redirect_edge_var_map_clear (e); | |
4ee9c684 | 148 | |
149 | /* Remove the appropriate PHI arguments in E's destination block. */ | |
75a70cf9 | 150 | for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi)) |
4ee9c684 | 151 | { |
75a70cf9 | 152 | tree def; |
efbcb6de | 153 | source_location locus ; |
75a70cf9 | 154 | |
1a91d914 | 155 | phi = gsi.phi (); |
75a70cf9 | 156 | def = gimple_phi_arg_def (phi, e->dest_idx); |
efbcb6de | 157 | locus = gimple_phi_arg_location (phi, e->dest_idx); |
d03ba86f | 158 | |
159 | if (def == NULL_TREE) | |
4ee9c684 | 160 | continue; |
161 | ||
60d535d2 | 162 | redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus); |
4ee9c684 | 163 | } |
164 | ||
165 | e = redirect_edge_succ_nodup (e, dest); | |
4ee9c684 | 166 | |
167 | return e; | |
168 | } | |
169 | ||
75a70cf9 | 170 | |
4fb5e5ca | 171 | /* Add PHI arguments queued in PENDING_STMT list on edge E to edge |
44a46103 | 172 | E->dest. */ |
173 | ||
174 | void | |
175 | flush_pending_stmts (edge e) | |
176 | { | |
1a91d914 | 177 | gphi *phi; |
d03ba86f | 178 | edge_var_map *vm; |
179 | int i; | |
1a91d914 | 180 | gphi_iterator gsi; |
44a46103 | 181 | |
06ecf488 | 182 | vec<edge_var_map> *v = redirect_edge_var_map_vector (e); |
d03ba86f | 183 | if (!v) |
44a46103 | 184 | return; |
185 | ||
75a70cf9 | 186 | for (gsi = gsi_start_phis (e->dest), i = 0; |
f1f41a6c | 187 | !gsi_end_p (gsi) && v->iterate (i, &vm); |
75a70cf9 | 188 | gsi_next (&gsi), i++) |
44a46103 | 189 | { |
75a70cf9 | 190 | tree def; |
191 | ||
1a91d914 | 192 | phi = gsi.phi (); |
75a70cf9 | 193 | def = redirect_edge_var_map_def (vm); |
60d535d2 | 194 | add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm)); |
44a46103 | 195 | } |
196 | ||
d03ba86f | 197 | redirect_edge_var_map_clear (e); |
44a46103 | 198 | } |
4ee9c684 | 199 | |
9a4a3348 | 200 | /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a |
201 | GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an | |
202 | expression with a different value. | |
203 | ||
204 | This will update any annotations (say debug bind stmts) referring | |
205 | to the original LHS, so that they use the RHS instead. This is | |
206 | done even if NLHS and LHS are the same, for it is understood that | |
207 | the RHS will be modified afterwards, and NLHS will not be assigned | |
208 | an equivalent value. | |
209 | ||
210 | Adjusting any non-annotation uses of the LHS, if needed, is a | |
211 | responsibility of the caller. | |
212 | ||
213 | The effect of this call should be pretty much the same as that of | |
214 | inserting a copy of STMT before STMT, and then removing the | |
215 | original stmt, at which time gsi_remove() would have update | |
216 | annotations, but using this function saves all the inserting, | |
217 | copying and removing. */ | |
218 | ||
219 | void | |
220 | gimple_replace_ssa_lhs (gimple stmt, tree nlhs) | |
221 | { | |
222 | if (MAY_HAVE_DEBUG_STMTS) | |
223 | { | |
224 | tree lhs = gimple_get_lhs (stmt); | |
225 | ||
226 | gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt); | |
227 | ||
228 | insert_debug_temp_for_var_def (NULL, lhs); | |
229 | } | |
230 | ||
231 | gimple_set_lhs (stmt, nlhs); | |
232 | } | |
233 | ||
234 | ||
9845d120 | 235 | /* Given a tree for an expression for which we might want to emit |
236 | locations or values in debug information (generally a variable, but | |
237 | we might deal with other kinds of trees in the future), return the | |
238 | tree that should be used as the variable of a DEBUG_BIND STMT or | |
239 | VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */ | |
240 | ||
241 | tree | |
242 | target_for_debug_bind (tree var) | |
243 | { | |
244 | if (!MAY_HAVE_DEBUG_STMTS) | |
245 | return NULL_TREE; | |
246 | ||
ec11736b | 247 | if (TREE_CODE (var) == SSA_NAME) |
248 | { | |
249 | var = SSA_NAME_VAR (var); | |
250 | if (var == NULL_TREE) | |
251 | return NULL_TREE; | |
252 | } | |
253 | ||
b03e5397 | 254 | if ((TREE_CODE (var) != VAR_DECL |
255 | || VAR_DECL_IS_VIRTUAL_OPERAND (var)) | |
9845d120 | 256 | && TREE_CODE (var) != PARM_DECL) |
257 | return NULL_TREE; | |
258 | ||
259 | if (DECL_HAS_VALUE_EXPR_P (var)) | |
260 | return target_for_debug_bind (DECL_VALUE_EXPR (var)); | |
261 | ||
262 | if (DECL_IGNORED_P (var)) | |
263 | return NULL_TREE; | |
264 | ||
b03e5397 | 265 | /* var-tracking only tracks registers. */ |
266 | if (!is_gimple_reg_type (TREE_TYPE (var))) | |
267 | return NULL_TREE; | |
9845d120 | 268 | |
269 | return var; | |
270 | } | |
271 | ||
272 | /* Called via walk_tree, look for SSA_NAMEs that have already been | |
273 | released. */ | |
274 | ||
275 | static tree | |
276 | find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_) | |
277 | { | |
278 | struct walk_stmt_info *wi = (struct walk_stmt_info *) data_; | |
279 | ||
43e54ec3 | 280 | if (wi && wi->is_lhs) |
9845d120 | 281 | return NULL_TREE; |
282 | ||
283 | if (TREE_CODE (*tp) == SSA_NAME) | |
284 | { | |
285 | if (SSA_NAME_IN_FREE_LIST (*tp)) | |
286 | return *tp; | |
287 | ||
288 | *walk_subtrees = 0; | |
289 | } | |
290 | else if (IS_TYPE_OR_DECL_P (*tp)) | |
291 | *walk_subtrees = 0; | |
292 | ||
293 | return NULL_TREE; | |
294 | } | |
295 | ||
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  GSI, if non-NULL, is the insertion point
   (and the stmt it denotes is taken as VAR's definition); otherwise
   SSA_NAME_DEF_STMT (VAR) is used.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple stmt;
  gimple def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      /* Two or more debug uses: no need to look further.  */
      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  /* No debug stmt mentions VAR; nothing to preserve.  */
  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* A PHI can only be propagated if it is degenerate (all args
	 equal), and even then only if the value hasn't been
	 released.  */
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  gdebug *def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    DECL_MODE (vexpr) = DECL_MODE (value);
	  else
	    DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* Unshare VALUE for each use: when it is not a
	       DEBUG_EXPR_DECL it may be propagated into several debug
	       binds, and tree nodes must not end up shared between
	       them.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	/* No usable value: the bind loses its location info.  */
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
485 | ||
486 | ||
688ff29b | 487 | /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by |
488 | other DEBUG stmts, and replace uses of the DEF with the | |
489 | newly-created debug temp. */ | |
9845d120 | 490 | |
491 | void | |
688ff29b | 492 | insert_debug_temps_for_defs (gimple_stmt_iterator *gsi) |
9845d120 | 493 | { |
688ff29b | 494 | gimple stmt; |
9845d120 | 495 | ssa_op_iter op_iter; |
496 | def_operand_p def_p; | |
497 | ||
498 | if (!MAY_HAVE_DEBUG_STMTS) | |
499 | return; | |
500 | ||
688ff29b | 501 | stmt = gsi_stmt (*gsi); |
502 | ||
43e54ec3 | 503 | FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) |
9845d120 | 504 | { |
505 | tree var = DEF_FROM_PTR (def_p); | |
506 | ||
507 | if (TREE_CODE (var) != SSA_NAME) | |
508 | continue; | |
509 | ||
688ff29b | 510 | insert_debug_temp_for_var_def (gsi, var); |
9845d120 | 511 | } |
512 | } | |
513 | ||
8ebd8f29 | 514 | /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */ |
515 | ||
516 | void | |
517 | reset_debug_uses (gimple stmt) | |
518 | { | |
519 | ssa_op_iter op_iter; | |
520 | def_operand_p def_p; | |
521 | imm_use_iterator imm_iter; | |
522 | gimple use_stmt; | |
523 | ||
524 | if (!MAY_HAVE_DEBUG_STMTS) | |
525 | return; | |
526 | ||
527 | FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) | |
528 | { | |
529 | tree var = DEF_FROM_PTR (def_p); | |
530 | ||
531 | if (TREE_CODE (var) != SSA_NAME) | |
532 | continue; | |
533 | ||
534 | FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var) | |
535 | { | |
536 | if (!gimple_debug_bind_p (use_stmt)) | |
537 | continue; | |
538 | ||
539 | gimple_debug_bind_reset_value (use_stmt); | |
540 | update_stmt (use_stmt); | |
541 | } | |
542 | } | |
543 | } | |
544 | ||
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
      {
	bool remove_now = true;
	tree var = ssa_name (j);
	gimple stmt;
	imm_use_iterator uit;

	FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	  {
	    ssa_op_iter dit;
	    def_operand_p def_p;

	    /* We can't propagate PHI nodes into debug stmts.  */
	    if (gimple_code (stmt) == GIMPLE_PHI
		|| is_gimple_debug (stmt))
	      continue;

	    /* If we find another definition to remove that uses
	       the one we're looking at, defer the removal of this
	       one, so that it can be propagated into debug stmts
	       after the other is.  */
	    FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
	      {
		tree odef = DEF_FROM_PTR (def_p);

		if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		  {
		    remove_now = false;
		    break;
		  }
	      }

	    /* Must use BREAK_FROM_IMM_USE_STMT (not plain break) so the
	       immediate-use iterator is unlinked properly.  */
	    if (!remove_now)
	      BREAK_FROM_IMM_USE_STMT (uit);
	  }

	if (remove_now)
	  {
	    gimple def = SSA_NAME_DEF_STMT (var);
	    gimple_stmt_iterator gsi = gsi_for_stmt (def);

	    if (gimple_code (def) == GIMPLE_PHI)
	      remove_phi_node (&gsi, true);
	    else
	      {
		gsi_remove (&gsi, true);
		release_defs (def);
	      }

	    /* Deferred names stay in TOREMOVE and are retried on the
	       next pass of the outer while loop.  */
	    bitmap_clear_bit (toremove, j);
	  }
      }
}
612 | ||
/* Return true if SSA_NAME is malformed and mark it visited.
   NOTE(review): this function does not itself set TREE_VISITED;
   callers such as verify_use do -- confirm whether the header
   comment is stale.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* A name detached from any symbol (SSA_NAME_VAR == NULL) has no
     symbol type to compare against.  */
  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* Default definitions are the only names allowed an empty (nop)
     defining statement.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
667 | ||
668 | ||
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  /* A non-NULL slot means some other block already claimed this
     version's definition.  */
  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  /* Record BB as the defining block for later duplicate detection.  */
  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  /* Common diagnostic tail: dump the offending name and statement.  */
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
724 | ||
725 | ||
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Verify the immediate-use chain once per name; TREE_VISITED marks
     names already checked.  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* A NULL use on the previous element means it is the list head,
	 which stores the name itself rather than a use.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
814 | ||
815 | ||
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      /* PHI argument I corresponds to predecessor edge I.  */
      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
	         e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  /* The plain assignment to ERR is safe only because every
	     earlier ERR = true path in this iteration either cannot
	     trigger for an SSA_NAME or jumps to the error label.  */
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  /* Strip component refs down to the base object before the
	     addressability check.  */
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  /* Common diagnostic tail: dump the whole PHI on any failure.  */
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}
907 | ||
908 | ||
909 | /* Verify common invariants in the SSA web. | |
910 | TODO: verify the variable annotations. */ | |
911 | ||
4b987fac | 912 | DEBUG_FUNCTION void |
71b65939 | 913 | verify_ssa (bool check_modified_stmt, bool check_ssa_operands) |
4ee9c684 | 914 | { |
81d08033 | 915 | size_t i; |
4ee9c684 | 916 | basic_block bb; |
680a19b9 | 917 | basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names); |
43daa21e | 918 | ssa_op_iter iter; |
919 | tree op; | |
50b08d37 | 920 | enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS); |
27335ffd | 921 | bitmap names_defined_in_bb = BITMAP_ALLOC (NULL); |
4ee9c684 | 922 | |
dd277d48 | 923 | gcc_assert (!need_ssa_update_p (cfun)); |
095dcfa3 | 924 | |
4ee9c684 | 925 | timevar_push (TV_TREE_SSA_VERIFY); |
926 | ||
81d08033 | 927 | /* Keep track of SSA names present in the IL. */ |
928 | for (i = 1; i < num_ssa_names; i++) | |
4ee9c684 | 929 | { |
79aae762 | 930 | tree name = ssa_name (i); |
931 | if (name) | |
4ee9c684 | 932 | { |
75a70cf9 | 933 | gimple stmt; |
79aae762 | 934 | TREE_VISITED (name) = 0; |
4ee9c684 | 935 | |
7c782c9b | 936 | verify_ssa_name (name, virtual_operand_p (name)); |
85f3d834 | 937 | |
79aae762 | 938 | stmt = SSA_NAME_DEF_STMT (name); |
75a70cf9 | 939 | if (!gimple_nop_p (stmt)) |
81d08033 | 940 | { |
75a70cf9 | 941 | basic_block bb = gimple_bb (stmt); |
fcf70d14 | 942 | if (verify_def (bb, definition_block, |
943 | name, stmt, virtual_operand_p (name))) | |
944 | goto err; | |
4ee9c684 | 945 | } |
946 | } | |
947 | } | |
948 | ||
79aae762 | 949 | calculate_dominance_info (CDI_DOMINATORS); |
4ee9c684 | 950 | |
951 | /* Now verify all the uses and make sure they agree with the definitions | |
952 | found in the previous pass. */ | |
fc00614f | 953 | FOR_EACH_BB_FN (bb, cfun) |
4ee9c684 | 954 | { |
955 | edge e; | |
cd665a06 | 956 | edge_iterator ei; |
4ee9c684 | 957 | |
958 | /* Make sure that all edges have a clear 'aux' field. */ | |
cd665a06 | 959 | FOR_EACH_EDGE (e, ei, bb->preds) |
4ee9c684 | 960 | { |
961 | if (e->aux) | |
962 | { | |
0a81f5a0 | 963 | error ("AUX pointer initialized for edge %d->%d", e->src->index, |
4ee9c684 | 964 | e->dest->index); |
81d08033 | 965 | goto err; |
4ee9c684 | 966 | } |
967 | } | |
968 | ||
969 | /* Verify the arguments for every PHI node in the block. */ | |
1a91d914 | 970 | for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
e599eacb | 971 | { |
1a91d914 | 972 | gphi *phi = gsi.phi (); |
e599eacb | 973 | if (verify_phi_args (phi, bb, definition_block)) |
974 | goto err; | |
4fb5e5ca | 975 | |
e599eacb | 976 | bitmap_set_bit (names_defined_in_bb, |
75a70cf9 | 977 | SSA_NAME_VERSION (gimple_phi_result (phi))); |
e599eacb | 978 | } |
4ee9c684 | 979 | |
81d08033 | 980 | /* Now verify all the uses and vuses in every statement of the block. */ |
1a91d914 | 981 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); |
982 | gsi_next (&gsi)) | |
4ee9c684 | 983 | { |
75a70cf9 | 984 | gimple stmt = gsi_stmt (gsi); |
22aa74c4 | 985 | use_operand_p use_p; |
986 | ||
75a70cf9 | 987 | if (check_modified_stmt && gimple_modified_p (stmt)) |
22aa74c4 | 988 | { |
4fb5e5ca | 989 | error ("stmt (%p) marked modified after optimization pass: ", |
b66731e8 | 990 | (void *)stmt); |
75a70cf9 | 991 | print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); |
22aa74c4 | 992 | goto err; |
993 | } | |
4ee9c684 | 994 | |
71b65939 | 995 | if (check_ssa_operands && verify_ssa_operands (cfun, stmt)) |
dd277d48 | 996 | { |
85f3d834 | 997 | print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); |
dd277d48 | 998 | goto err; |
4fb5e5ca | 999 | } |
1000 | ||
85f3d834 | 1001 | if (gimple_debug_bind_p (stmt) |
1002 | && !gimple_debug_bind_has_value_p (stmt)) | |
1003 | continue; | |
4fb5e5ca | 1004 | |
1005 | FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE) | |
4ee9c684 | 1006 | { |
22aa74c4 | 1007 | op = USE_FROM_PTR (use_p); |
81d08033 | 1008 | if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)], |
4fb5e5ca | 1009 | use_p, stmt, false, names_defined_in_bb)) |
e599eacb | 1010 | goto err; |
1011 | } | |
1012 | ||
1013 | FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS) | |
dd277d48 | 1014 | { |
1015 | if (SSA_NAME_DEF_STMT (op) != stmt) | |
1016 | { | |
1017 | error ("SSA_NAME_DEF_STMT is wrong"); | |
1018 | fprintf (stderr, "Expected definition statement:\n"); | |
1019 | print_gimple_stmt (stderr, stmt, 4, TDF_VOPS); | |
1020 | fprintf (stderr, "\nActual definition statement:\n"); | |
1021 | print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op), | |
1022 | 4, TDF_VOPS); | |
1023 | goto err; | |
1024 | } | |
1025 | bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op)); | |
1026 | } | |
e599eacb | 1027 | } |
1028 | ||
e599eacb | 1029 | bitmap_clear (names_defined_in_bb); |
4ee9c684 | 1030 | } |
1031 | ||
81d08033 | 1032 | free (definition_block); |
095dcfa3 | 1033 | |
c26a6416 | 1034 | /* Restore the dominance information to its prior known state, so |
1035 | that we do not perturb the compiler's subsequent behavior. */ | |
08323300 | 1036 | if (orig_dom_state == DOM_NONE) |
1037 | free_dominance_info (CDI_DOMINATORS); | |
1038 | else | |
50b08d37 | 1039 | set_dom_info_availability (CDI_DOMINATORS, orig_dom_state); |
48e1416a | 1040 | |
27335ffd | 1041 | BITMAP_FREE (names_defined_in_bb); |
4ee9c684 | 1042 | timevar_pop (TV_TREE_SSA_VERIFY); |
81d08033 | 1043 | return; |
4ee9c684 | 1044 | |
81d08033 | 1045 | err: |
0a81f5a0 | 1046 | internal_error ("verify_ssa failed"); |
4ee9c684 | 1047 | } |
1048 | ||
1049 | ||
/* Initialize global DFA and SSA structures for function FN:
   allocate its gimple_df, create the default-defs hash table,
   reset the escaped points-to solution and set up the SSA name
   tables.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  /* Initial size 20 for the default-defs table; GC-allocated.  */
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}
1060 | ||
/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  Returns 0 (no TODO flags).  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  The pass must
     run before any SSA structures exist for the function.  */
  gcc_assert (!cfun->gimple_df);
  init_tree_ssa (cfun);
  return 0;
}
1072 | ||
namespace {

/* Pass descriptor for the internal *init_datastructures pass; requires
   only a CFG and provides/destroys no properties.  */
const pass_data pass_data_init_datastructures =
{
  GIMPLE_PASS, /* type */
  "*init_datastructures", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_init_datastructures : public gimple_opt_pass
{
public:
  pass_init_datastructures (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_init_datastructures, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      /* Do nothing for functions that were already produced in SSA form.  */
      return !(fun->curr_properties & PROP_ssa);
    }

  virtual unsigned int execute (function *)
    {
      return execute_init_datastructures ();
    }

}; // class pass_init_datastructures

} // anon namespace
1110 | ||
/* Factory for the init_datastructures pass; caller owns the result.  */

gimple_opt_pass *
make_pass_init_datastructures (gcc::context *ctxt)
{
  return new pass_init_datastructures (ctxt);
}
1116 | ||
/* Deallocate memory associated with SSA data structures for the
   current function (CFUN).  */

void
delete_tree_ssa (void)
{
  fini_ssanames ();

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (cfun))
    fini_ssa_operands (cfun);

  /* The default-defs table itself is GC-allocated (see init_tree_ssa),
     so emptying it and dropping the reference is sufficient.  */
  cfun->gimple_df->default_defs->empty ();
  cfun->gimple_df->default_defs = NULL;
  pt_solution_reset (&cfun->gimple_df->escaped);
  if (cfun->gimple_df->decls_to_pointers != NULL)
    delete cfun->gimple_df->decls_to_pointers;
  cfun->gimple_df->decls_to_pointers = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}
1140 | ||
4ee9c684 | 1141 | /* Return true if EXPR is a useless type conversion, otherwise return |
1142 | false. */ | |
1143 | ||
1144 | bool | |
1145 | tree_ssa_useless_type_conversion (tree expr) | |
1146 | { | |
1147 | /* If we have an assignment that merely uses a NOP_EXPR to change | |
1148 | the top of the RHS to the type of the LHS and the type conversion | |
1149 | is "safe", then strip away the type conversion so that we can | |
1150 | enter LHS = RHS into the const_and_copies table. */ | |
72dd6141 | 1151 | if (CONVERT_EXPR_P (expr) |
b6b93727 | 1152 | || TREE_CODE (expr) == VIEW_CONVERT_EXPR |
1153 | || TREE_CODE (expr) == NON_LVALUE_EXPR) | |
548044d8 | 1154 | return useless_type_conversion_p |
c2f47e15 | 1155 | (TREE_TYPE (expr), |
75a70cf9 | 1156 | TREE_TYPE (TREE_OPERAND (expr, 0))); |
4ee9c684 | 1157 | |
1158 | return false; | |
1159 | } | |
1160 | ||
98112881 | 1161 | /* Strip conversions from EXP according to |
1162 | tree_ssa_useless_type_conversion and return the resulting | |
1163 | expression. */ | |
1164 | ||
1165 | tree | |
1166 | tree_ssa_strip_useless_type_conversions (tree exp) | |
1167 | { | |
1168 | while (tree_ssa_useless_type_conversion (exp)) | |
1169 | exp = TREE_OPERAND (exp, 0); | |
1170 | return exp; | |
1171 | } | |
1172 | ||
4ee9c684 | 1173 | |
4d8d655b | 1174 | /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what |
1175 | should be returned if the value is only partially undefined. */ | |
1d2fabca | 1176 | |
1177 | bool | |
4d8d655b | 1178 | ssa_undefined_value_p (tree t, bool partial) |
1d2fabca | 1179 | { |
f772e50c | 1180 | gimple def_stmt; |
1d2fabca | 1181 | tree var = SSA_NAME_VAR (t); |
1182 | ||
1183 | if (!var) | |
1184 | ; | |
1185 | /* Parameters get their initial value from the function entry. */ | |
1186 | else if (TREE_CODE (var) == PARM_DECL) | |
1187 | return false; | |
1188 | /* When returning by reference the return address is actually a hidden | |
1189 | parameter. */ | |
1190 | else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var)) | |
1191 | return false; | |
1192 | /* Hard register variables get their initial value from the ether. */ | |
1193 | else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var)) | |
1194 | return false; | |
1195 | ||
1196 | /* The value is undefined iff its definition statement is empty. */ | |
f772e50c | 1197 | def_stmt = SSA_NAME_DEF_STMT (t); |
1198 | if (gimple_nop_p (def_stmt)) | |
1199 | return true; | |
1200 | ||
1201 | /* Check if the complex was not only partially defined. */ | |
4d8d655b | 1202 | if (partial && is_gimple_assign (def_stmt) |
f772e50c | 1203 | && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR) |
1204 | { | |
1205 | tree rhs1, rhs2; | |
1206 | ||
1207 | rhs1 = gimple_assign_rhs1 (def_stmt); | |
1208 | rhs2 = gimple_assign_rhs2 (def_stmt); | |
1209 | return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1)) | |
1210 | || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2)); | |
1211 | } | |
1212 | return false; | |
1d2fabca | 1213 | } |
1214 | ||
1215 | ||
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  Only symbols whose
   DECL_UID is set in SUITABLE_FOR_RENAMING are rewritten.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Walk down to the innermost base of the reference.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
    {
      /* An element-aligned access into a vector symbol becomes a
	 BIT_FIELD_REF (offset converted from bytes to bits).  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* A component access into a complex symbol becomes a REALPART
	 (offset zero) or IMAGPART (nonzero offset) reference.  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* A whole-symbol access at offset zero becomes the bare symbol,
	 possibly wrapped in a VIEW_CONVERT_EXPR to keep the access type.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
    }
}
1264 | ||
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base = ref;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      /* An in-bounds, element-aligned access into a vector or complex
	 decl can be lowered (see maybe_rewrite_mem_ref_base), so it
	 does not block rewriting.  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && wi::fits_uhwi_p (mem_ref_offset (base))
	  && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
			mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* Otherwise a partial, offsetted or volatility-changing view of
	 the decl prevents rewriting it into SSA form.  */
      if (DECL_P (decl)
	  && (!integer_zerop (TREE_OPERAND (base, 1))
	      || (DECL_SIZE (decl)
		  != TYPE_SIZE (TREE_TYPE (base)))
	      || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
	return decl;
    }

  return NULL_TREE;
}
1306 | ||
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* A decl that is wrapped inside a MEM-REF that covers
     it in full is also rewritable.
     ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (lhs, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
      if (DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;
    }

  return true;
}
1341 | ||
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit
   and mark the variable VAR for conversion into SSA by recording its
   DECL_UID in SUITABLE_FOR_RENAMING.  ADDRESSES_TAKEN holds UIDs of
   variables whose address is taken; NOT_REG_NEEDS holds UIDs of
   variables that must stay non-registers.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Complex and vector variables that are not volatile, not hard
     registers and not required to stay in memory can become gimple
     registers.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }
}
1393 | ||
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.
   Runs in two phases: first collect which variables have their address
   taken or must stay non-registers, then rewrite the IL to expose bare
   symbols and update the SSA form.  Also callable outside the pass
   manager.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum gimple_code code = gimple_code (stmt);
	  tree decl;

	  /* Note all addresses taken by the stmt.  */
	  gimple_ior_addresses_taken (addresses_taken, stmt);

	  /* If we have a call or an assignment, see if the lhs contains
	     a local decl that requires not to be a gimple register.  */
	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      if (lhs
		  && TREE_CODE (lhs) != SSA_NAME
		  && non_rewritable_lvalue_p (lhs))
		{
		  decl = get_base_address (lhs);
		  if (DECL_P (decl))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if ((decl = non_rewritable_mem_ref_base (rhs)))
		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
	    }

	  else if (code == GIMPLE_CALL)
	    {
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (arg)))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  else if (code == GIMPLE_ASM)
	    {
	      gasm *asm_stmt = as_a <gasm *> (stmt);
	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_output_op (asm_stmt, i);
		  tree lhs = TREE_VALUE (link);
		  if (TREE_CODE (lhs) != SSA_NAME)
		    {
		      decl = get_base_address (lhs);
		      if (DECL_P (decl)
			  && (non_rewritable_lvalue_p (lhs)
			      /* We cannot move required conversions from
				 the lhs to the rhs in asm statements, so
				 require we do not need any.  */
			      || !useless_type_conversion_p
			           (TREE_TYPE (lhs), TREE_TYPE (decl))))
			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		    }
		}
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_input_op (asm_stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }
	}

      /* Addresses can also be taken by PHI arguments.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  size_t i;
	  gphi *phi = gsi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);

	    /* Re-write TARGET_MEM_REFs of symbols we want to
	       rewrite into SSA form.  */
	    if (gimple_assign_single_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
		tree sym;

		/* Rewrite LHS IMAG/REALPART_EXPR similar to
		   gimplify_modify_expr_complex_part.  */
		if ((TREE_CODE (lhs) == IMAGPART_EXPR
		     || TREE_CODE (lhs) == REALPART_EXPR)
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0))))
		  {
		    /* Load the other half of the complex value, then
		       rebuild the store as a full COMPLEX_EXPR set.  */
		    tree other = make_ssa_name (TREE_TYPE (lhs));
		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
					? REALPART_EXPR : IMAGPART_EXPR,
					TREE_TYPE (other),
					TREE_OPERAND (lhs, 0));
		    gimple load = gimple_build_assign (other, lrhs);
		    location_t loc = gimple_location (stmt);
		    gimple_set_location (load, loc);
		    gimple_set_vuse (load, gimple_vuse (stmt));
		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
		    gimple_assign_set_rhs_with_ops
		      (&gsi, COMPLEX_EXPR,
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? other : gimple_assign_rhs1 (stmt),
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* We shouldn't have any fancy wrapping of
		   component-refs on the LHS, but look through
		   VIEW_CONVERT_EXPRs as that is easy.  */
		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
		  lhs = TREE_OPERAND (lhs, 0);
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && integer_zerop (TREE_OPERAND (lhs, 1))
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && !TREE_ADDRESSABLE (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
		  lhs = sym;
		else
		  lhs = gimple_assign_lhs (stmt);

		/* Rewrite the RHS and make sure the resulting assignment
		   is validly typed.  */
		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
		rhs = gimple_assign_rhs1 (stmt);
		if (gimple_assign_lhs (stmt) != lhs
		    && !useless_type_conversion_p (TREE_TYPE (lhs),
						   TREE_TYPE (rhs)))
		  rhs = fold_build1 (VIEW_CONVERT_EXPR,
				     TREE_TYPE (lhs), rhs);

		if (gimple_assign_lhs (stmt) != lhs)
		  gimple_assign_set_lhs (stmt, lhs);

		if (gimple_assign_rhs1 (stmt) != rhs)
		  {
		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_CALL)
	      {
		unsigned i;
		for (i = 0; i < gimple_call_num_args (stmt); ++i)
		  {
		    tree *argp = gimple_call_arg_ptr (stmt, i);
		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_ASM)
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		unsigned i;
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_output_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_input_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
	      }

	    else if (gimple_debug_bind_p (stmt)
		     && gimple_debug_bind_has_value_p (stmt))
	      {
		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
		tree decl;
		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
		decl = non_rewritable_mem_ref_base (*valuep);
		/* Drop debug values we could not rewrite rather than
		   keeping a stale reference.  */
		if (decl
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		  gimple_debug_bind_reset_value (stmt);
	      }

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);

	    gsi_next (&gsi);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
	update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
1655 | ||
namespace {

/* Pass descriptor for the "addressables" pass; requires SSA and
   requests TODO_update_address_taken at finish, which triggers
   execute_update_addresses_taken.  */
const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: none overridden — all work happens through the
     TODO_update_address_taken finish flag above.  */

}; // class pass_update_address_taken

} // anon namespace
1683 | ||
/* Factory for the update_address_taken pass; caller owns the result.  */

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}