]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* Miscellaneous SSA utility functions. |
fbd26352 | 2 | Copyright (C) 2001-2019 Free Software Foundation, Inc. |
4ee9c684 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 8 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
9ef16211 | 23 | #include "backend.h" |
4ee9c684 | 24 | #include "tree.h" |
9ef16211 | 25 | #include "gimple.h" |
7c29e30e | 26 | #include "cfghooks.h" |
27 | #include "tree-pass.h" | |
9ef16211 | 28 | #include "ssa.h" |
7c29e30e | 29 | #include "gimple-pretty-print.h" |
30 | #include "diagnostic-core.h" | |
b20a8bb4 | 31 | #include "fold-const.h" |
9ed99284 | 32 | #include "stor-layout.h" |
bc61cadb | 33 | #include "gimple-fold.h" |
a8783bee | 34 | #include "gimplify.h" |
dcf1a1ec | 35 | #include "gimple-iterator.h" |
36 | #include "gimple-walk.h" | |
05d9c18a | 37 | #include "tree-ssa-loop-manip.h" |
073c1fd5 | 38 | #include "tree-into-ssa.h" |
39 | #include "tree-ssa.h" | |
0ab72b53 | 40 | #include "cfgloop.h" |
e797f49f | 41 | #include "cfgexpand.h" |
fcc0ec6b | 42 | #include "tree-cfg.h" |
43 | #include "tree-dfa.h" | |
30a86690 | 44 | #include "stringpool.h" |
45 | #include "attribs.h" | |
c51887c5 | 46 | #include "asan.h" |
4ee9c684 | 47 | |
d03ba86f | 48 | /* Pointer map of variable mappings, keyed by edge. */ |
06ecf488 | 49 | static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps; |
d03ba86f | 50 | |
51 | ||
52 | /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */ | |
53 | ||
54 | void | |
be1e7283 | 55 | redirect_edge_var_map_add (edge e, tree result, tree def, location_t locus) |
d03ba86f | 56 | { |
d03ba86f | 57 | edge_var_map new_node; |
58 | ||
59 | if (edge_var_maps == NULL) | |
06ecf488 | 60 | edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >; |
d03ba86f | 61 | |
06ecf488 | 62 | auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e); |
d03ba86f | 63 | new_node.def = def; |
64 | new_node.result = result; | |
efbcb6de | 65 | new_node.locus = locus; |
d03ba86f | 66 | |
06ecf488 | 67 | slot.safe_push (new_node); |
d03ba86f | 68 | } |
69 | ||
70 | ||
71 | /* Clear the var mappings in edge E. */ | |
72 | ||
73 | void | |
74 | redirect_edge_var_map_clear (edge e) | |
75 | { | |
d03ba86f | 76 | if (!edge_var_maps) |
77 | return; | |
78 | ||
06ecf488 | 79 | auto_vec<edge_var_map> *head = edge_var_maps->get (e); |
d03ba86f | 80 | |
06ecf488 | 81 | if (head) |
82 | head->release (); | |
d03ba86f | 83 | } |
84 | ||
85 | ||
86 | /* Duplicate the redirected var mappings in OLDE in NEWE. | |
87 | ||
06ecf488 | 88 | This assumes a hash_map can have multiple edges mapping to the same |
89 | var_map (many to one mapping), since we don't remove the previous mappings. | |
90 | */ | |
d03ba86f | 91 | |
92 | void | |
93 | redirect_edge_var_map_dup (edge newe, edge olde) | |
94 | { | |
d03ba86f | 95 | if (!edge_var_maps) |
96 | return; | |
97 | ||
e4b3cdcb | 98 | auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe); |
99 | auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde); | |
100 | if (!old_head) | |
d03ba86f | 101 | return; |
d03ba86f | 102 | |
e4b3cdcb | 103 | new_head->safe_splice (*old_head); |
d03ba86f | 104 | } |
105 | ||
106 | ||
f0b5f617 | 107 | /* Return the variable mappings for a given edge. If there is none, return |
d03ba86f | 108 | NULL. */ |
109 | ||
06ecf488 | 110 | vec<edge_var_map> * |
d03ba86f | 111 | redirect_edge_var_map_vector (edge e) |
112 | { | |
d03ba86f | 113 | /* Hey, what kind of idiot would... you'd be surprised. */ |
114 | if (!edge_var_maps) | |
115 | return NULL; | |
116 | ||
06ecf488 | 117 | auto_vec<edge_var_map> *slot = edge_var_maps->get (e); |
d03ba86f | 118 | if (!slot) |
119 | return NULL; | |
120 | ||
06ecf488 | 121 | return slot; |
182db9cf | 122 | } |
d03ba86f | 123 | |
124 | /* Clear the edge variable mappings. */ | |
125 | ||
126 | void | |
b1090780 | 127 | redirect_edge_var_map_empty (void) |
d03ba86f | 128 | { |
b1090780 | 129 | if (edge_var_maps) |
130 | edge_var_maps->empty (); | |
d03ba86f | 131 | } |
132 | ||
133 | ||
a61b7bc4 | 134 | /* Remove the corresponding arguments from the PHI nodes in E's |
135 | destination block and redirect it to DEST. Return redirected edge. | |
d03ba86f | 136 | The list of removed arguments is stored in a vector accessed |
137 | through edge_var_maps. */ | |
4ee9c684 | 138 | |
139 | edge | |
140 | ssa_redirect_edge (edge e, basic_block dest) | |
141 | { | |
1a91d914 | 142 | gphi_iterator gsi; |
143 | gphi *phi; | |
d03ba86f | 144 | |
145 | redirect_edge_var_map_clear (e); | |
4ee9c684 | 146 | |
e76fa056 | 147 | /* Remove the appropriate PHI arguments in E's destination block. |
148 | If we are redirecting a copied edge the destination has not | |
149 | got PHI argument space reserved nor an interesting argument. */ | |
150 | if (! (e->dest->flags & BB_DUPLICATED)) | |
151 | for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | |
152 | { | |
153 | tree def; | |
be1e7283 | 154 | location_t locus; |
e76fa056 | 155 | |
156 | phi = gsi.phi (); | |
157 | def = gimple_phi_arg_def (phi, e->dest_idx); | |
158 | locus = gimple_phi_arg_location (phi, e->dest_idx); | |
159 | ||
160 | if (def == NULL_TREE) | |
161 | continue; | |
162 | ||
163 | redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus); | |
164 | } | |
4ee9c684 | 165 | |
166 | e = redirect_edge_succ_nodup (e, dest); | |
4ee9c684 | 167 | |
168 | return e; | |
169 | } | |
170 | ||
75a70cf9 | 171 | |
4fb5e5ca | 172 | /* Add PHI arguments queued in PENDING_STMT list on edge E to edge |
44a46103 | 173 | E->dest. */ |
174 | ||
175 | void | |
176 | flush_pending_stmts (edge e) | |
177 | { | |
1a91d914 | 178 | gphi *phi; |
d03ba86f | 179 | edge_var_map *vm; |
180 | int i; | |
1a91d914 | 181 | gphi_iterator gsi; |
44a46103 | 182 | |
06ecf488 | 183 | vec<edge_var_map> *v = redirect_edge_var_map_vector (e); |
d03ba86f | 184 | if (!v) |
44a46103 | 185 | return; |
186 | ||
75a70cf9 | 187 | for (gsi = gsi_start_phis (e->dest), i = 0; |
f1f41a6c | 188 | !gsi_end_p (gsi) && v->iterate (i, &vm); |
75a70cf9 | 189 | gsi_next (&gsi), i++) |
44a46103 | 190 | { |
75a70cf9 | 191 | tree def; |
192 | ||
1a91d914 | 193 | phi = gsi.phi (); |
75a70cf9 | 194 | def = redirect_edge_var_map_def (vm); |
60d535d2 | 195 | add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm)); |
44a46103 | 196 | } |
197 | ||
d03ba86f | 198 | redirect_edge_var_map_clear (e); |
44a46103 | 199 | } |
4ee9c684 | 200 | |
9a4a3348 | 201 | /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a |
202 | GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an | |
203 | expression with a different value. | |
204 | ||
205 | This will update any annotations (say debug bind stmts) referring | |
206 | to the original LHS, so that they use the RHS instead. This is | |
207 | done even if NLHS and LHS are the same, for it is understood that | |
208 | the RHS will be modified afterwards, and NLHS will not be assigned | |
209 | an equivalent value. | |
210 | ||
211 | Adjusting any non-annotation uses of the LHS, if needed, is a | |
212 | responsibility of the caller. | |
213 | ||
214 | The effect of this call should be pretty much the same as that of | |
215 | inserting a copy of STMT before STMT, and then removing the | |
216 | original stmt, at which time gsi_remove() would have update | |
217 | annotations, but using this function saves all the inserting, | |
218 | copying and removing. */ | |
219 | ||
220 | void | |
42acab1c | 221 | gimple_replace_ssa_lhs (gimple *stmt, tree nlhs) |
9a4a3348 | 222 | { |
c64f38bf | 223 | if (MAY_HAVE_DEBUG_BIND_STMTS) |
9a4a3348 | 224 | { |
225 | tree lhs = gimple_get_lhs (stmt); | |
226 | ||
227 | gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt); | |
228 | ||
229 | insert_debug_temp_for_var_def (NULL, lhs); | |
230 | } | |
231 | ||
232 | gimple_set_lhs (stmt, nlhs); | |
233 | } | |
234 | ||
235 | ||
9845d120 | 236 | /* Given a tree for an expression for which we might want to emit |
237 | locations or values in debug information (generally a variable, but | |
238 | we might deal with other kinds of trees in the future), return the | |
239 | tree that should be used as the variable of a DEBUG_BIND STMT or | |
240 | VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */ | |
241 | ||
242 | tree | |
243 | target_for_debug_bind (tree var) | |
244 | { | |
c64f38bf | 245 | if (!MAY_HAVE_DEBUG_BIND_STMTS) |
9845d120 | 246 | return NULL_TREE; |
247 | ||
ec11736b | 248 | if (TREE_CODE (var) == SSA_NAME) |
249 | { | |
250 | var = SSA_NAME_VAR (var); | |
251 | if (var == NULL_TREE) | |
252 | return NULL_TREE; | |
253 | } | |
254 | ||
53e9c5c4 | 255 | if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var)) |
9845d120 | 256 | && TREE_CODE (var) != PARM_DECL) |
257 | return NULL_TREE; | |
258 | ||
259 | if (DECL_HAS_VALUE_EXPR_P (var)) | |
260 | return target_for_debug_bind (DECL_VALUE_EXPR (var)); | |
261 | ||
262 | if (DECL_IGNORED_P (var)) | |
263 | return NULL_TREE; | |
264 | ||
b03e5397 | 265 | /* var-tracking only tracks registers. */ |
266 | if (!is_gimple_reg_type (TREE_TYPE (var))) | |
267 | return NULL_TREE; | |
9845d120 | 268 | |
269 | return var; | |
270 | } | |
271 | ||
272 | /* Called via walk_tree, look for SSA_NAMEs that have already been | |
273 | released. */ | |
274 | ||
275 | static tree | |
276 | find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_) | |
277 | { | |
278 | struct walk_stmt_info *wi = (struct walk_stmt_info *) data_; | |
279 | ||
43e54ec3 | 280 | if (wi && wi->is_lhs) |
9845d120 | 281 | return NULL_TREE; |
282 | ||
283 | if (TREE_CODE (*tp) == SSA_NAME) | |
284 | { | |
285 | if (SSA_NAME_IN_FREE_LIST (*tp)) | |
286 | return *tp; | |
287 | ||
288 | *walk_subtrees = 0; | |
289 | } | |
290 | else if (IS_TYPE_OR_DECL_P (*tp)) | |
291 | *walk_subtrees = 0; | |
292 | ||
293 | return NULL_TREE; | |
294 | } | |
295 | ||
688ff29b | 296 | /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced |
297 | by other DEBUG stmts, and replace uses of the DEF with the | |
298 | newly-created debug temp. */ | |
299 | ||
9845d120 | 300 | void |
688ff29b | 301 | insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var) |
9845d120 | 302 | { |
303 | imm_use_iterator imm_iter; | |
9845d120 | 304 | use_operand_p use_p; |
42acab1c | 305 | gimple *stmt; |
306 | gimple *def_stmt = NULL; | |
688ff29b | 307 | int usecount = 0; |
9845d120 | 308 | tree value = NULL; |
9845d120 | 309 | |
c64f38bf | 310 | if (!MAY_HAVE_DEBUG_BIND_STMTS) |
9845d120 | 311 | return; |
312 | ||
afed704c | 313 | /* If this name has already been registered for replacement, do nothing |
314 | as anything that uses this name isn't in SSA form. */ | |
315 | if (name_registered_for_update_p (var)) | |
316 | return; | |
317 | ||
318 | /* Check whether there are debug stmts that reference this variable and, | |
319 | if there are, decide whether we should use a debug temp. */ | |
688ff29b | 320 | FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var) |
9845d120 | 321 | { |
688ff29b | 322 | stmt = USE_STMT (use_p); |
9845d120 | 323 | |
688ff29b | 324 | if (!gimple_debug_bind_p (stmt)) |
9845d120 | 325 | continue; |
326 | ||
688ff29b | 327 | if (usecount++) |
328 | break; | |
329 | ||
330 | if (gimple_debug_bind_get_value (stmt) != var) | |
9845d120 | 331 | { |
688ff29b | 332 | /* Count this as an additional use, so as to make sure we |
333 | use a temp unless VAR's definition has a SINGLE_RHS that | |
334 | can be shared. */ | |
335 | usecount++; | |
336 | break; | |
337 | } | |
338 | } | |
9845d120 | 339 | |
688ff29b | 340 | if (!usecount) |
341 | return; | |
9845d120 | 342 | |
688ff29b | 343 | if (gsi) |
344 | def_stmt = gsi_stmt (*gsi); | |
345 | else | |
346 | def_stmt = SSA_NAME_DEF_STMT (var); | |
9845d120 | 347 | |
688ff29b | 348 | /* If we didn't get an insertion point, and the stmt has already |
349 | been removed, we won't be able to insert the debug bind stmt, so | |
350 | we'll have to drop debug information. */ | |
43e54ec3 | 351 | if (gimple_code (def_stmt) == GIMPLE_PHI) |
352 | { | |
1a91d914 | 353 | value = degenerate_phi_result (as_a <gphi *> (def_stmt)); |
43e54ec3 | 354 | if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL)) |
355 | value = NULL; | |
a05063ec | 356 | /* error_mark_node is what fixup_noreturn_call changes PHI arguments |
357 | to. */ | |
358 | else if (value == error_mark_node) | |
359 | value = NULL; | |
43e54ec3 | 360 | } |
361 | else if (is_gimple_assign (def_stmt)) | |
688ff29b | 362 | { |
363 | bool no_value = false; | |
9845d120 | 364 | |
688ff29b | 365 | if (!dom_info_available_p (CDI_DOMINATORS)) |
366 | { | |
367 | struct walk_stmt_info wi; | |
368 | ||
369 | memset (&wi, 0, sizeof (wi)); | |
370 | ||
371 | /* When removing blocks without following reverse dominance | |
372 | order, we may sometimes encounter SSA_NAMEs that have | |
373 | already been released, referenced in other SSA_DEFs that | |
374 | we're about to release. Consider: | |
375 | ||
376 | <bb X>: | |
377 | v_1 = foo; | |
378 | ||
379 | <bb Y>: | |
380 | w_2 = v_1 + bar; | |
381 | # DEBUG w => w_2 | |
382 | ||
383 | If we deleted BB X first, propagating the value of w_2 | |
384 | won't do us any good. It's too late to recover their | |
385 | original definition of v_1: when it was deleted, it was | |
386 | only referenced in other DEFs, it couldn't possibly know | |
387 | it should have been retained, and propagating every | |
388 | single DEF just in case it might have to be propagated | |
389 | into a DEBUG STMT would probably be too wasteful. | |
390 | ||
391 | When dominator information is not readily available, we | |
392 | check for and accept some loss of debug information. But | |
393 | if it is available, there's no excuse for us to remove | |
394 | blocks in the wrong order, so we don't even check for | |
395 | dead SSA NAMEs. SSA verification shall catch any | |
396 | errors. */ | |
397 | if ((!gsi && !gimple_bb (def_stmt)) | |
7f9256fe | 398 | || walk_gimple_op (def_stmt, find_released_ssa_name, &wi)) |
688ff29b | 399 | no_value = true; |
9845d120 | 400 | } |
401 | ||
688ff29b | 402 | if (!no_value) |
403 | value = gimple_assign_rhs_to_tree (def_stmt); | |
404 | } | |
405 | ||
406 | if (value) | |
407 | { | |
408 | /* If there's a single use of VAR, and VAR is the entire debug | |
409 | expression (usecount would have been incremented again | |
410 | otherwise), and the definition involves only constants and | |
411 | SSA names, then we can propagate VALUE into this single use, | |
412 | avoiding the temp. | |
413 | ||
414 | We can also avoid using a temp if VALUE can be shared and | |
415 | propagated into all uses, without generating expressions that | |
416 | wouldn't be valid gimple RHSs. | |
417 | ||
418 | Other cases that would require unsharing or non-gimple RHSs | |
419 | are deferred to a debug temp, although we could avoid temps | |
420 | at the expense of duplication of expressions. */ | |
421 | ||
422 | if (CONSTANT_CLASS_P (value) | |
43e54ec3 | 423 | || gimple_code (def_stmt) == GIMPLE_PHI |
688ff29b | 424 | || (usecount == 1 |
425 | && (!gimple_assign_single_p (def_stmt) | |
426 | || is_gimple_min_invariant (value))) | |
427 | || is_gimple_reg (value)) | |
11db727d | 428 | ; |
688ff29b | 429 | else |
9845d120 | 430 | { |
1a91d914 | 431 | gdebug *def_temp; |
688ff29b | 432 | tree vexpr = make_node (DEBUG_EXPR_DECL); |
433 | ||
434 | def_temp = gimple_build_debug_bind (vexpr, | |
435 | unshare_expr (value), | |
436 | def_stmt); | |
437 | ||
438 | DECL_ARTIFICIAL (vexpr) = 1; | |
439 | TREE_TYPE (vexpr) = TREE_TYPE (value); | |
440 | if (DECL_P (value)) | |
adc78298 | 441 | SET_DECL_MODE (vexpr, DECL_MODE (value)); |
688ff29b | 442 | else |
adc78298 | 443 | SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value))); |
9845d120 | 444 | |
688ff29b | 445 | if (gsi) |
446 | gsi_insert_before (gsi, def_temp, GSI_SAME_STMT); | |
447 | else | |
9845d120 | 448 | { |
688ff29b | 449 | gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt); |
450 | gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT); | |
9845d120 | 451 | } |
452 | ||
688ff29b | 453 | value = vexpr; |
9845d120 | 454 | } |
688ff29b | 455 | } |
9845d120 | 456 | |
688ff29b | 457 | FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var) |
458 | { | |
459 | if (!gimple_debug_bind_p (stmt)) | |
460 | continue; | |
461 | ||
462 | if (value) | |
02a562db | 463 | { |
464 | FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter) | |
465 | /* unshare_expr is not needed here. vexpr is either a | |
466 | SINGLE_RHS, that can be safely shared, some other RHS | |
467 | that was unshared when we found it had a single debug | |
468 | use, or a DEBUG_EXPR_DECL, that can be safely | |
469 | shared. */ | |
11db727d | 470 | SET_USE (use_p, unshare_expr (value)); |
02a562db | 471 | /* If we didn't replace uses with a debug decl fold the |
472 | resulting expression. Otherwise we end up with invalid IL. */ | |
473 | if (TREE_CODE (value) != DEBUG_EXPR_DECL) | |
50aacf4c | 474 | { |
475 | gimple_stmt_iterator gsi = gsi_for_stmt (stmt); | |
476 | fold_stmt_inplace (&gsi); | |
477 | } | |
02a562db | 478 | } |
688ff29b | 479 | else |
480 | gimple_debug_bind_reset_value (stmt); | |
9845d120 | 481 | |
482 | update_stmt (stmt); | |
483 | } | |
484 | } | |
485 | ||
486 | ||
688ff29b | 487 | /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by |
488 | other DEBUG stmts, and replace uses of the DEF with the | |
489 | newly-created debug temp. */ | |
9845d120 | 490 | |
491 | void | |
688ff29b | 492 | insert_debug_temps_for_defs (gimple_stmt_iterator *gsi) |
9845d120 | 493 | { |
42acab1c | 494 | gimple *stmt; |
9845d120 | 495 | ssa_op_iter op_iter; |
496 | def_operand_p def_p; | |
497 | ||
c64f38bf | 498 | if (!MAY_HAVE_DEBUG_BIND_STMTS) |
9845d120 | 499 | return; |
500 | ||
688ff29b | 501 | stmt = gsi_stmt (*gsi); |
502 | ||
43e54ec3 | 503 | FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) |
9845d120 | 504 | { |
505 | tree var = DEF_FROM_PTR (def_p); | |
506 | ||
507 | if (TREE_CODE (var) != SSA_NAME) | |
508 | continue; | |
509 | ||
688ff29b | 510 | insert_debug_temp_for_var_def (gsi, var); |
9845d120 | 511 | } |
512 | } | |
513 | ||
8ebd8f29 | 514 | /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */ |
515 | ||
516 | void | |
42acab1c | 517 | reset_debug_uses (gimple *stmt) |
8ebd8f29 | 518 | { |
519 | ssa_op_iter op_iter; | |
520 | def_operand_p def_p; | |
521 | imm_use_iterator imm_iter; | |
42acab1c | 522 | gimple *use_stmt; |
8ebd8f29 | 523 | |
c64f38bf | 524 | if (!MAY_HAVE_DEBUG_BIND_STMTS) |
8ebd8f29 | 525 | return; |
526 | ||
527 | FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF) | |
528 | { | |
529 | tree var = DEF_FROM_PTR (def_p); | |
530 | ||
531 | if (TREE_CODE (var) != SSA_NAME) | |
532 | continue; | |
533 | ||
534 | FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var) | |
535 | { | |
536 | if (!gimple_debug_bind_p (use_stmt)) | |
537 | continue; | |
538 | ||
539 | gimple_debug_bind_reset_value (use_stmt); | |
540 | update_stmt (use_stmt); | |
541 | } | |
542 | } | |
543 | } | |
544 | ||
1b223194 | 545 | /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing |
546 | dominated stmts before their dominators, so that release_ssa_defs | |
547 | stands a chance of propagating DEFs into debug bind stmts. */ | |
548 | ||
549 | void | |
550 | release_defs_bitset (bitmap toremove) | |
551 | { | |
552 | unsigned j; | |
553 | bitmap_iterator bi; | |
554 | ||
555 | /* Performing a topological sort is probably overkill, this will | |
556 | most likely run in slightly superlinear time, rather than the | |
557 | pathological quadratic worst case. */ | |
558 | while (!bitmap_empty_p (toremove)) | |
f79643b7 | 559 | { |
560 | unsigned to_remove_bit = -1U; | |
561 | EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi) | |
562 | { | |
563 | if (to_remove_bit != -1U) | |
564 | { | |
565 | bitmap_clear_bit (toremove, to_remove_bit); | |
566 | to_remove_bit = -1U; | |
567 | } | |
568 | ||
569 | bool remove_now = true; | |
570 | tree var = ssa_name (j); | |
571 | gimple *stmt; | |
572 | imm_use_iterator uit; | |
573 | ||
574 | FOR_EACH_IMM_USE_STMT (stmt, uit, var) | |
575 | { | |
576 | ssa_op_iter dit; | |
577 | def_operand_p def_p; | |
578 | ||
579 | /* We can't propagate PHI nodes into debug stmts. */ | |
580 | if (gimple_code (stmt) == GIMPLE_PHI | |
581 | || is_gimple_debug (stmt)) | |
582 | continue; | |
583 | ||
584 | /* If we find another definition to remove that uses | |
585 | the one we're looking at, defer the removal of this | |
586 | one, so that it can be propagated into debug stmts | |
587 | after the other is. */ | |
588 | FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF) | |
589 | { | |
590 | tree odef = DEF_FROM_PTR (def_p); | |
591 | ||
592 | if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef))) | |
593 | { | |
594 | remove_now = false; | |
595 | break; | |
596 | } | |
597 | } | |
598 | ||
599 | if (!remove_now) | |
600 | BREAK_FROM_IMM_USE_STMT (uit); | |
601 | } | |
602 | ||
603 | if (remove_now) | |
604 | { | |
605 | gimple *def = SSA_NAME_DEF_STMT (var); | |
606 | gimple_stmt_iterator gsi = gsi_for_stmt (def); | |
607 | ||
608 | if (gimple_code (def) == GIMPLE_PHI) | |
609 | remove_phi_node (&gsi, true); | |
610 | else | |
611 | { | |
612 | gsi_remove (&gsi, true); | |
613 | release_defs (def); | |
614 | } | |
615 | ||
616 | to_remove_bit = j; | |
617 | } | |
618 | } | |
619 | if (to_remove_bit != -1U) | |
620 | bitmap_clear_bit (toremove, to_remove_bit); | |
621 | } | |
622 | ||
1b223194 | 623 | } |
624 | ||
fcc0ec6b | 625 | /* Verify virtual SSA form. */ |
626 | ||
627 | bool | |
628 | verify_vssa (basic_block bb, tree current_vdef, sbitmap visited) | |
629 | { | |
630 | bool err = false; | |
631 | ||
632 | if (bitmap_bit_p (visited, bb->index)) | |
633 | return false; | |
634 | ||
635 | bitmap_set_bit (visited, bb->index); | |
636 | ||
637 | /* Pick up the single virtual PHI def. */ | |
638 | gphi *phi = NULL; | |
639 | for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si); | |
640 | gsi_next (&si)) | |
641 | { | |
642 | tree res = gimple_phi_result (si.phi ()); | |
643 | if (virtual_operand_p (res)) | |
644 | { | |
645 | if (phi) | |
646 | { | |
647 | error ("multiple virtual PHI nodes in BB %d", bb->index); | |
1ffa4346 | 648 | print_gimple_stmt (stderr, phi, 0); |
649 | print_gimple_stmt (stderr, si.phi (), 0); | |
fcc0ec6b | 650 | err = true; |
651 | } | |
652 | else | |
653 | phi = si.phi (); | |
654 | } | |
655 | } | |
656 | if (phi) | |
657 | { | |
658 | current_vdef = gimple_phi_result (phi); | |
659 | if (TREE_CODE (current_vdef) != SSA_NAME) | |
660 | { | |
661 | error ("virtual definition is not an SSA name"); | |
1ffa4346 | 662 | print_gimple_stmt (stderr, phi, 0); |
fcc0ec6b | 663 | err = true; |
664 | } | |
665 | } | |
666 | ||
667 | /* Verify stmts. */ | |
668 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); | |
669 | gsi_next (&gsi)) | |
670 | { | |
671 | gimple *stmt = gsi_stmt (gsi); | |
672 | tree vuse = gimple_vuse (stmt); | |
673 | if (vuse) | |
674 | { | |
675 | if (vuse != current_vdef) | |
676 | { | |
677 | error ("stmt with wrong VUSE"); | |
678 | print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); | |
679 | fprintf (stderr, "expected "); | |
1ffa4346 | 680 | print_generic_expr (stderr, current_vdef); |
fcc0ec6b | 681 | fprintf (stderr, "\n"); |
682 | err = true; | |
683 | } | |
684 | tree vdef = gimple_vdef (stmt); | |
685 | if (vdef) | |
686 | { | |
687 | current_vdef = vdef; | |
688 | if (TREE_CODE (current_vdef) != SSA_NAME) | |
689 | { | |
690 | error ("virtual definition is not an SSA name"); | |
1ffa4346 | 691 | print_gimple_stmt (stderr, phi, 0); |
fcc0ec6b | 692 | err = true; |
693 | } | |
694 | } | |
695 | } | |
696 | } | |
697 | ||
698 | /* Verify destination PHI uses and recurse. */ | |
699 | edge_iterator ei; | |
700 | edge e; | |
701 | FOR_EACH_EDGE (e, ei, bb->succs) | |
702 | { | |
703 | gphi *phi = get_virtual_phi (e->dest); | |
704 | if (phi | |
705 | && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef) | |
706 | { | |
707 | error ("PHI node with wrong VUSE on edge from BB %d", | |
708 | e->src->index); | |
709 | print_gimple_stmt (stderr, phi, 0, TDF_VOPS); | |
710 | fprintf (stderr, "expected "); | |
1ffa4346 | 711 | print_generic_expr (stderr, current_vdef); |
fcc0ec6b | 712 | fprintf (stderr, "\n"); |
713 | err = true; | |
714 | } | |
715 | ||
716 | /* Recurse. */ | |
717 | err |= verify_vssa (e->dest, current_vdef, visited); | |
718 | } | |
719 | ||
720 | return err; | |
721 | } | |
722 | ||
81d08033 | 723 | /* Return true if SSA_NAME is malformed and mark it visited. |
4ee9c684 | 724 | |
81d08033 | 725 | IS_VIRTUAL is true if this SSA_NAME was found inside a virtual |
726 | operand. */ | |
4ee9c684 | 727 | |
728 | static bool | |
81d08033 | 729 | verify_ssa_name (tree ssa_name, bool is_virtual) |
4ee9c684 | 730 | { |
4ee9c684 | 731 | if (TREE_CODE (ssa_name) != SSA_NAME) |
732 | { | |
0a81f5a0 | 733 | error ("expected an SSA_NAME object"); |
81d08033 | 734 | return true; |
4ee9c684 | 735 | } |
736 | ||
bcef4e8e | 737 | if (SSA_NAME_IN_FREE_LIST (ssa_name)) |
cbbefea4 | 738 | { |
bcef4e8e | 739 | error ("found an SSA_NAME that had been released into the free pool"); |
cbbefea4 | 740 | return true; |
741 | } | |
742 | ||
bcef4e8e | 743 | if (SSA_NAME_VAR (ssa_name) != NULL_TREE |
744 | && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name))) | |
81d08033 | 745 | { |
bcef4e8e | 746 | error ("type mismatch between an SSA_NAME and its symbol"); |
81d08033 | 747 | return true; |
748 | } | |
749 | ||
7c782c9b | 750 | if (is_virtual && !virtual_operand_p (ssa_name)) |
81d08033 | 751 | { |
0a81f5a0 | 752 | error ("found a virtual definition for a GIMPLE register"); |
81d08033 | 753 | return true; |
754 | } | |
755 | ||
dd277d48 | 756 | if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun)) |
757 | { | |
758 | error ("virtual SSA name for non-VOP decl"); | |
759 | return true; | |
760 | } | |
761 | ||
7c782c9b | 762 | if (!is_virtual && virtual_operand_p (ssa_name)) |
81d08033 | 763 | { |
0a81f5a0 | 764 | error ("found a real definition for a non-register"); |
81d08033 | 765 | return true; |
766 | } | |
767 | ||
4fb5e5ca | 768 | if (SSA_NAME_IS_DEFAULT_DEF (ssa_name) |
75a70cf9 | 769 | && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))) |
4fb5e5ca | 770 | { |
771 | error ("found a default name with a non-empty defining statement"); | |
772 | return true; | |
773 | } | |
774 | ||
81d08033 | 775 | return false; |
776 | } | |
777 | ||
778 | ||
779 | /* Return true if the definition of SSA_NAME at block BB is malformed. | |
780 | ||
781 | STMT is the statement where SSA_NAME is created. | |
782 | ||
783 | DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME | |
784 | version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set, | |
785 | it means that the block in that array slot contains the | |
786 | definition of SSA_NAME. | |
787 | ||
4fb5e5ca | 788 | IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */ |
81d08033 | 789 | |
790 | static bool | |
791 | verify_def (basic_block bb, basic_block *definition_block, tree ssa_name, | |
42acab1c | 792 | gimple *stmt, bool is_virtual) |
81d08033 | 793 | { |
794 | if (verify_ssa_name (ssa_name, is_virtual)) | |
795 | goto err; | |
796 | ||
ec11736b | 797 | if (SSA_NAME_VAR (ssa_name) |
798 | && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL | |
524a0531 | 799 | && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name))) |
800 | { | |
bf776685 | 801 | error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set"); |
524a0531 | 802 | goto err; |
803 | } | |
804 | ||
4ee9c684 | 805 | if (definition_block[SSA_NAME_VERSION (ssa_name)]) |
806 | { | |
807 | error ("SSA_NAME created in two different blocks %i and %i", | |
808 | definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index); | |
81d08033 | 809 | goto err; |
4ee9c684 | 810 | } |
811 | ||
812 | definition_block[SSA_NAME_VERSION (ssa_name)] = bb; | |
813 | ||
814 | if (SSA_NAME_DEF_STMT (ssa_name) != stmt) | |
815 | { | |
816 | error ("SSA_NAME_DEF_STMT is wrong"); | |
4ee9c684 | 817 | fprintf (stderr, "Expected definition statement:\n"); |
75a70cf9 | 818 | print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS); |
4ee9c684 | 819 | fprintf (stderr, "\nActual definition statement:\n"); |
75a70cf9 | 820 | print_gimple_stmt (stderr, stmt, 4, TDF_VOPS); |
81d08033 | 821 | goto err; |
4ee9c684 | 822 | } |
823 | ||
81d08033 | 824 | return false; |
825 | ||
826 | err: | |
827 | fprintf (stderr, "while verifying SSA_NAME "); | |
1ffa4346 | 828 | print_generic_expr (stderr, ssa_name); |
81d08033 | 829 | fprintf (stderr, " in statement\n"); |
75a70cf9 | 830 | print_gimple_stmt (stderr, stmt, 4, TDF_VOPS); |
81d08033 | 831 | |
832 | return true; | |
4ee9c684 | 833 | } |
834 | ||
835 | ||
836 | /* Return true if the use of SSA_NAME at statement STMT in block BB is | |
837 | malformed. | |
838 | ||
839 | DEF_BB is the block where SSA_NAME was found to be created. | |
840 | ||
841 | IDOM contains immediate dominator information for the flowgraph. | |
842 | ||
843 | CHECK_ABNORMAL is true if the caller wants to check whether this use | |
844 | is flowing through an abnormal edge (only used when checking PHI | |
81d08033 | 845 | arguments). |
846 | ||
e599eacb | 847 | If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names |
848 | that are defined before STMT in basic block BB. */ | |
4ee9c684 | 849 | |
850 | static bool | |
22aa74c4 | 851 | verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p, |
42acab1c | 852 | gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb) |
4ee9c684 | 853 | { |
854 | bool err = false; | |
22aa74c4 | 855 | tree ssa_name = USE_FROM_PTR (use_p); |
4ee9c684 | 856 | |
22aa74c4 | 857 | if (!TREE_VISITED (ssa_name)) |
858 | if (verify_imm_links (stderr, ssa_name)) | |
859 | err = true; | |
860 | ||
06c1e160 | 861 | TREE_VISITED (ssa_name) = 1; |
81d08033 | 862 | |
75a70cf9 | 863 | if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)) |
4fb5e5ca | 864 | && SSA_NAME_IS_DEFAULT_DEF (ssa_name)) |
81d08033 | 865 | ; /* Default definitions have empty statements. Nothing to do. */ |
4ee9c684 | 866 | else if (!def_bb) |
867 | { | |
0a81f5a0 | 868 | error ("missing definition"); |
4ee9c684 | 869 | err = true; |
870 | } | |
871 | else if (bb != def_bb | |
872 | && !dominated_by_p (CDI_DOMINATORS, bb, def_bb)) | |
873 | { | |
0a81f5a0 | 874 | error ("definition in block %i does not dominate use in block %i", |
4ee9c684 | 875 | def_bb->index, bb->index); |
876 | err = true; | |
877 | } | |
e599eacb | 878 | else if (bb == def_bb |
879 | && names_defined_in_bb != NULL | |
880 | && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name))) | |
881 | { | |
0a81f5a0 | 882 | error ("definition in block %i follows the use", def_bb->index); |
e599eacb | 883 | err = true; |
884 | } | |
4ee9c684 | 885 | |
886 | if (check_abnormal | |
887 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name)) | |
888 | { | |
889 | error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set"); | |
890 | err = true; | |
891 | } | |
892 | ||
48e1416a | 893 | /* Make sure the use is in an appropriate list by checking the previous |
20833d12 | 894 | element to make sure it's the same. */ |
22aa74c4 | 895 | if (use_p->prev == NULL) |
896 | { | |
0a81f5a0 | 897 | error ("no immediate_use list"); |
22aa74c4 | 898 | err = true; |
899 | } | |
900 | else | |
901 | { | |
75a70cf9 | 902 | tree listvar; |
22aa74c4 | 903 | if (use_p->prev->use == NULL) |
75a70cf9 | 904 | listvar = use_p->prev->loc.ssa_name; |
22aa74c4 | 905 | else |
906 | listvar = USE_FROM_PTR (use_p->prev); | |
907 | if (listvar != ssa_name) | |
908 | { | |
0a81f5a0 | 909 | error ("wrong immediate use list"); |
22aa74c4 | 910 | err = true; |
911 | } | |
912 | } | |
913 | ||
4ee9c684 | 914 | if (err) |
915 | { | |
916 | fprintf (stderr, "for SSA_NAME: "); | |
94dcc41f | 917 | print_generic_expr (stderr, ssa_name, TDF_VOPS); |
88dbf20f | 918 | fprintf (stderr, " in statement:\n"); |
75a70cf9 | 919 | print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); |
4ee9c684 | 920 | } |
921 | ||
922 | return err; | |
923 | } | |
924 | ||
925 | ||
926 | /* Return true if any of the arguments for PHI node PHI at block BB is | |
927 | malformed. | |
928 | ||
4fb5e5ca | 929 | DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME |
930 | version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set, | |
931 | it means that the block in that array slot contains the | |
932 | definition of SSA_NAME. */ | |
4ee9c684 | 933 | |
934 | static bool | |
1a91d914 | 935 | verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block) |
4ee9c684 | 936 | { |
937 | edge e; | |
938 | bool err = false; | |
75a70cf9 | 939 | size_t i, phi_num_args = gimple_phi_num_args (phi); |
4ee9c684 | 940 | |
79aae762 | 941 | if (EDGE_COUNT (bb->preds) != phi_num_args) |
942 | { | |
0a81f5a0 | 943 | error ("incoming edge count does not match number of PHI arguments"); |
79aae762 | 944 | err = true; |
945 | goto error; | |
946 | } | |
947 | ||
4ee9c684 | 948 | for (i = 0; i < phi_num_args; i++) |
949 | { | |
75a70cf9 | 950 | use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i); |
22aa74c4 | 951 | tree op = USE_FROM_PTR (op_p); |
952 | ||
e0ebfc19 | 953 | e = EDGE_PRED (bb, i); |
cc890649 | 954 | |
955 | if (op == NULL_TREE) | |
956 | { | |
0a81f5a0 | 957 | error ("PHI argument is missing for edge %d->%d", |
cc890649 | 958 | e->src->index, |
959 | e->dest->index); | |
960 | err = true; | |
961 | goto error; | |
962 | } | |
963 | ||
79aae762 | 964 | if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op)) |
965 | { | |
966 | error ("PHI argument is not SSA_NAME, or invariant"); | |
967 | err = true; | |
968 | } | |
969 | ||
4ee9c684 | 970 | if (TREE_CODE (op) == SSA_NAME) |
4fb5e5ca | 971 | { |
7c782c9b | 972 | err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi))); |
4fb5e5ca | 973 | err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)], |
974 | op_p, phi, e->flags & EDGE_ABNORMAL, NULL); | |
975 | } | |
4ee9c684 | 976 | |
86f2ad37 | 977 | if (TREE_CODE (op) == ADDR_EXPR) |
978 | { | |
979 | tree base = TREE_OPERAND (op, 0); | |
980 | while (handled_component_p (base)) | |
981 | base = TREE_OPERAND (base, 0); | |
53e9c5c4 | 982 | if ((VAR_P (base) |
86f2ad37 | 983 | || TREE_CODE (base) == PARM_DECL |
984 | || TREE_CODE (base) == RESULT_DECL) | |
985 | && !TREE_ADDRESSABLE (base)) | |
986 | { | |
987 | error ("address taken, but ADDRESSABLE bit not set"); | |
988 | err = true; | |
989 | } | |
990 | } | |
991 | ||
4ee9c684 | 992 | if (e->dest != bb) |
993 | { | |
0a81f5a0 | 994 | error ("wrong edge %d->%d for PHI argument", |
7781aa77 | 995 | e->src->index, e->dest->index); |
4ee9c684 | 996 | err = true; |
997 | } | |
998 | ||
4ee9c684 | 999 | if (err) |
1000 | { | |
1001 | fprintf (stderr, "PHI argument\n"); | |
94dcc41f | 1002 | print_generic_stmt (stderr, op, TDF_VOPS); |
81d08033 | 1003 | goto error; |
4ee9c684 | 1004 | } |
4ee9c684 | 1005 | } |
1006 | ||
81d08033 | 1007 | error: |
4ee9c684 | 1008 | if (err) |
1009 | { | |
1010 | fprintf (stderr, "for PHI node\n"); | |
75a70cf9 | 1011 | print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS); |
4ee9c684 | 1012 | } |
1013 | ||
1014 | ||
1015 | return err; | |
1016 | } | |
1017 | ||
1018 | ||
1019 | /* Verify common invariants in the SSA web. | |
1020 | TODO: verify the variable annotations. */ | |
1021 | ||
4b987fac | 1022 | DEBUG_FUNCTION void |
71b65939 | 1023 | verify_ssa (bool check_modified_stmt, bool check_ssa_operands) |
4ee9c684 | 1024 | { |
4ee9c684 | 1025 | basic_block bb; |
680a19b9 | 1026 | basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names); |
43daa21e | 1027 | ssa_op_iter iter; |
1028 | tree op; | |
50b08d37 | 1029 | enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS); |
035def86 | 1030 | auto_bitmap names_defined_in_bb; |
4ee9c684 | 1031 | |
dd277d48 | 1032 | gcc_assert (!need_ssa_update_p (cfun)); |
095dcfa3 | 1033 | |
4ee9c684 | 1034 | timevar_push (TV_TREE_SSA_VERIFY); |
1035 | ||
4ee9c684 | 1036 | { |
3e7e7fdd | 1037 | /* Keep track of SSA names present in the IL. */ |
1038 | size_t i; | |
1039 | tree name; | |
1040 | hash_map <void *, tree> ssa_info; | |
85f3d834 | 1041 | |
3e7e7fdd | 1042 | FOR_EACH_SSA_NAME (i, name, cfun) |
f211616e | 1043 | { |
3e7e7fdd | 1044 | gimple *stmt; |
1045 | TREE_VISITED (name) = 0; | |
1046 | ||
1047 | verify_ssa_name (name, virtual_operand_p (name)); | |
1048 | ||
1049 | stmt = SSA_NAME_DEF_STMT (name); | |
1050 | if (!gimple_nop_p (stmt)) | |
1051 | { | |
1052 | basic_block bb = gimple_bb (stmt); | |
1053 | if (verify_def (bb, definition_block, | |
1054 | name, stmt, virtual_operand_p (name))) | |
1055 | goto err; | |
1056 | } | |
1057 | ||
1058 | void *info = NULL; | |
1059 | if (POINTER_TYPE_P (TREE_TYPE (name))) | |
1060 | info = SSA_NAME_PTR_INFO (name); | |
1061 | else if (INTEGRAL_TYPE_P (TREE_TYPE (name))) | |
1062 | info = SSA_NAME_RANGE_INFO (name); | |
1063 | if (info) | |
1064 | { | |
1065 | bool existed; | |
1066 | tree &val = ssa_info.get_or_insert (info, &existed); | |
1067 | if (existed) | |
1068 | { | |
1069 | error ("shared SSA name info"); | |
1ffa4346 | 1070 | print_generic_expr (stderr, val); |
3e7e7fdd | 1071 | fprintf (stderr, " and "); |
1ffa4346 | 1072 | print_generic_expr (stderr, name); |
3e7e7fdd | 1073 | fprintf (stderr, "\n"); |
1074 | goto err; | |
1075 | } | |
1076 | else | |
1077 | val = name; | |
1078 | } | |
4ee9c684 | 1079 | } |
1080 | } | |
1081 | ||
79aae762 | 1082 | calculate_dominance_info (CDI_DOMINATORS); |
4ee9c684 | 1083 | |
1084 | /* Now verify all the uses and make sure they agree with the definitions | |
1085 | found in the previous pass. */ | |
fc00614f | 1086 | FOR_EACH_BB_FN (bb, cfun) |
4ee9c684 | 1087 | { |
1088 | edge e; | |
cd665a06 | 1089 | edge_iterator ei; |
4ee9c684 | 1090 | |
1091 | /* Make sure that all edges have a clear 'aux' field. */ | |
cd665a06 | 1092 | FOR_EACH_EDGE (e, ei, bb->preds) |
4ee9c684 | 1093 | { |
1094 | if (e->aux) | |
1095 | { | |
0a81f5a0 | 1096 | error ("AUX pointer initialized for edge %d->%d", e->src->index, |
4ee9c684 | 1097 | e->dest->index); |
81d08033 | 1098 | goto err; |
4ee9c684 | 1099 | } |
1100 | } | |
1101 | ||
1102 | /* Verify the arguments for every PHI node in the block. */ | |
1a91d914 | 1103 | for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
e599eacb | 1104 | { |
1a91d914 | 1105 | gphi *phi = gsi.phi (); |
e599eacb | 1106 | if (verify_phi_args (phi, bb, definition_block)) |
1107 | goto err; | |
4fb5e5ca | 1108 | |
e599eacb | 1109 | bitmap_set_bit (names_defined_in_bb, |
75a70cf9 | 1110 | SSA_NAME_VERSION (gimple_phi_result (phi))); |
e599eacb | 1111 | } |
4ee9c684 | 1112 | |
81d08033 | 1113 | /* Now verify all the uses and vuses in every statement of the block. */ |
1a91d914 | 1114 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); |
1115 | gsi_next (&gsi)) | |
4ee9c684 | 1116 | { |
42acab1c | 1117 | gimple *stmt = gsi_stmt (gsi); |
22aa74c4 | 1118 | use_operand_p use_p; |
1119 | ||
75a70cf9 | 1120 | if (check_modified_stmt && gimple_modified_p (stmt)) |
22aa74c4 | 1121 | { |
4fb5e5ca | 1122 | error ("stmt (%p) marked modified after optimization pass: ", |
b66731e8 | 1123 | (void *)stmt); |
75a70cf9 | 1124 | print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); |
22aa74c4 | 1125 | goto err; |
1126 | } | |
4ee9c684 | 1127 | |
71b65939 | 1128 | if (check_ssa_operands && verify_ssa_operands (cfun, stmt)) |
dd277d48 | 1129 | { |
85f3d834 | 1130 | print_gimple_stmt (stderr, stmt, 0, TDF_VOPS); |
dd277d48 | 1131 | goto err; |
4fb5e5ca | 1132 | } |
1133 | ||
85f3d834 | 1134 | if (gimple_debug_bind_p (stmt) |
1135 | && !gimple_debug_bind_has_value_p (stmt)) | |
1136 | continue; | |
4fb5e5ca | 1137 | |
1138 | FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE) | |
4ee9c684 | 1139 | { |
22aa74c4 | 1140 | op = USE_FROM_PTR (use_p); |
81d08033 | 1141 | if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)], |
4fb5e5ca | 1142 | use_p, stmt, false, names_defined_in_bb)) |
e599eacb | 1143 | goto err; |
1144 | } | |
1145 | ||
1146 | FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS) | |
dd277d48 | 1147 | { |
1148 | if (SSA_NAME_DEF_STMT (op) != stmt) | |
1149 | { | |
1150 | error ("SSA_NAME_DEF_STMT is wrong"); | |
1151 | fprintf (stderr, "Expected definition statement:\n"); | |
1152 | print_gimple_stmt (stderr, stmt, 4, TDF_VOPS); | |
1153 | fprintf (stderr, "\nActual definition statement:\n"); | |
1154 | print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op), | |
1155 | 4, TDF_VOPS); | |
1156 | goto err; | |
1157 | } | |
1158 | bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op)); | |
1159 | } | |
e599eacb | 1160 | } |
1161 | ||
e599eacb | 1162 | bitmap_clear (names_defined_in_bb); |
4ee9c684 | 1163 | } |
1164 | ||
81d08033 | 1165 | free (definition_block); |
095dcfa3 | 1166 | |
fcc0ec6b | 1167 | if (gimple_vop (cfun) |
1168 | && ssa_default_def (cfun, gimple_vop (cfun))) | |
1169 | { | |
1170 | auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1); | |
1171 | bitmap_clear (visited); | |
1172 | if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun), | |
1173 | ssa_default_def (cfun, gimple_vop (cfun)), visited)) | |
1174 | goto err; | |
1175 | } | |
1176 | ||
c26a6416 | 1177 | /* Restore the dominance information to its prior known state, so |
1178 | that we do not perturb the compiler's subsequent behavior. */ | |
08323300 | 1179 | if (orig_dom_state == DOM_NONE) |
1180 | free_dominance_info (CDI_DOMINATORS); | |
1181 | else | |
50b08d37 | 1182 | set_dom_info_availability (CDI_DOMINATORS, orig_dom_state); |
48e1416a | 1183 | |
4ee9c684 | 1184 | timevar_pop (TV_TREE_SSA_VERIFY); |
81d08033 | 1185 | return; |
4ee9c684 | 1186 | |
81d08033 | 1187 | err: |
0a81f5a0 | 1188 | internal_error ("verify_ssa failed"); |
4ee9c684 | 1189 | } |
1190 | ||
1191 | ||
4ee9c684 | 1192 | /* Initialize global DFA and SSA structures. */ |
1193 | ||
1194 | void | |
bcaa2770 | 1195 | init_tree_ssa (struct function *fn) |
4ee9c684 | 1196 | { |
25a27413 | 1197 | fn->gimple_df = ggc_cleared_alloc<gimple_df> (); |
2ef51f0e | 1198 | fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20); |
dd277d48 | 1199 | pt_solution_reset (&fn->gimple_df->escaped); |
bcaa2770 | 1200 | init_ssanames (fn, 0); |
4ee9c684 | 1201 | } |
1202 | ||
4ee9c684 | 1203 | /* Deallocate memory associated with SSA data structures for FNDECL. */ |
1204 | ||
1205 | void | |
d4f078b5 | 1206 | delete_tree_ssa (struct function *fn) |
4ee9c684 | 1207 | { |
d4f078b5 | 1208 | fini_ssanames (fn); |
75a70cf9 | 1209 | |
1210 | /* We no longer maintain the SSA operand cache at this point. */ | |
d4f078b5 | 1211 | if (ssa_operands_active (fn)) |
1212 | fini_ssa_operands (fn); | |
1213 | ||
1214 | fn->gimple_df->default_defs->empty (); | |
1215 | fn->gimple_df->default_defs = NULL; | |
1216 | pt_solution_reset (&fn->gimple_df->escaped); | |
1217 | if (fn->gimple_df->decls_to_pointers != NULL) | |
1218 | delete fn->gimple_df->decls_to_pointers; | |
1219 | fn->gimple_df->decls_to_pointers = NULL; | |
d4f078b5 | 1220 | fn->gimple_df = NULL; |
2fbd06b5 | 1221 | |
1222 | /* We no longer need the edge variable maps. */ | |
b1090780 | 1223 | redirect_edge_var_map_empty (); |
4ee9c684 | 1224 | } |
1225 | ||
4ee9c684 | 1226 | /* Return true if EXPR is a useless type conversion, otherwise return |
1227 | false. */ | |
1228 | ||
1229 | bool | |
1230 | tree_ssa_useless_type_conversion (tree expr) | |
1231 | { | |
1232 | /* If we have an assignment that merely uses a NOP_EXPR to change | |
1233 | the top of the RHS to the type of the LHS and the type conversion | |
1234 | is "safe", then strip away the type conversion so that we can | |
1235 | enter LHS = RHS into the const_and_copies table. */ | |
72dd6141 | 1236 | if (CONVERT_EXPR_P (expr) |
b6b93727 | 1237 | || TREE_CODE (expr) == VIEW_CONVERT_EXPR |
1238 | || TREE_CODE (expr) == NON_LVALUE_EXPR) | |
548044d8 | 1239 | return useless_type_conversion_p |
c2f47e15 | 1240 | (TREE_TYPE (expr), |
75a70cf9 | 1241 | TREE_TYPE (TREE_OPERAND (expr, 0))); |
4ee9c684 | 1242 | |
1243 | return false; | |
1244 | } | |
1245 | ||
98112881 | 1246 | /* Strip conversions from EXP according to |
1247 | tree_ssa_useless_type_conversion and return the resulting | |
1248 | expression. */ | |
1249 | ||
1250 | tree | |
1251 | tree_ssa_strip_useless_type_conversions (tree exp) | |
1252 | { | |
1253 | while (tree_ssa_useless_type_conversion (exp)) | |
1254 | exp = TREE_OPERAND (exp, 0); | |
1255 | return exp; | |
1256 | } | |
1257 | ||
44841cf7 | 1258 | /* Return true if T, as SSA_NAME, has an implicit default defined value. */ |
1d2fabca | 1259 | |
1260 | bool | |
44841cf7 | 1261 | ssa_defined_default_def_p (tree t) |
1d2fabca | 1262 | { |
1263 | tree var = SSA_NAME_VAR (t); | |
1264 | ||
1265 | if (!var) | |
1266 | ; | |
1267 | /* Parameters get their initial value from the function entry. */ | |
1268 | else if (TREE_CODE (var) == PARM_DECL) | |
44841cf7 | 1269 | return true; |
1d2fabca | 1270 | /* When returning by reference the return address is actually a hidden |
1271 | parameter. */ | |
1272 | else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var)) | |
44841cf7 | 1273 | return true; |
1d2fabca | 1274 | /* Hard register variables get their initial value from the ether. */ |
53e9c5c4 | 1275 | else if (VAR_P (var) && DECL_HARD_REGISTER (var)) |
44841cf7 | 1276 | return true; |
1277 | ||
1278 | return false; | |
1279 | } | |
1280 | ||
1281 | ||
1282 | /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what | |
1283 | should be returned if the value is only partially undefined. */ | |
1284 | ||
1285 | bool | |
1286 | ssa_undefined_value_p (tree t, bool partial) | |
1287 | { | |
1288 | gimple *def_stmt; | |
1289 | ||
1290 | if (ssa_defined_default_def_p (t)) | |
1d2fabca | 1291 | return false; |
1292 | ||
1293 | /* The value is undefined iff its definition statement is empty. */ | |
f772e50c | 1294 | def_stmt = SSA_NAME_DEF_STMT (t); |
1295 | if (gimple_nop_p (def_stmt)) | |
1296 | return true; | |
1297 | ||
1298 | /* Check if the complex was not only partially defined. */ | |
4d8d655b | 1299 | if (partial && is_gimple_assign (def_stmt) |
f772e50c | 1300 | && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR) |
1301 | { | |
1302 | tree rhs1, rhs2; | |
1303 | ||
1304 | rhs1 = gimple_assign_rhs1 (def_stmt); | |
1305 | rhs2 = gimple_assign_rhs2 (def_stmt); | |
1306 | return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1)) | |
1307 | || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2)); | |
1308 | } | |
1309 | return false; | |
1d2fabca | 1310 | } |
1311 | ||
1312 | ||
4883b250 | 1313 | /* Return TRUE iff STMT, a gimple statement, references an undefined |
1314 | SSA name. */ | |
1315 | ||
1316 | bool | |
1317 | gimple_uses_undefined_value_p (gimple *stmt) | |
1318 | { | |
1319 | ssa_op_iter iter; | |
1320 | tree op; | |
1321 | ||
1322 | FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) | |
1323 | if (ssa_undefined_value_p (op)) | |
1324 | return true; | |
1325 | ||
1326 | return false; | |
1327 | } | |
1328 | ||
1329 | ||
1330 | ||
182cf5a9 | 1331 | /* If necessary, rewrite the base of the reference tree *TP from |
1332 | a MEM_REF to a plain or converted symbol. */ | |
1333 | ||
1334 | static void | |
e70e8b13 | 1335 | maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming) |
182cf5a9 | 1336 | { |
1337 | tree sym; | |
1338 | ||
1339 | while (handled_component_p (*tp)) | |
1340 | tp = &TREE_OPERAND (*tp, 0); | |
1341 | if (TREE_CODE (*tp) == MEM_REF | |
1342 | && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR | |
182cf5a9 | 1343 | && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0)) |
1344 | && DECL_P (sym) | |
1345 | && !TREE_ADDRESSABLE (sym) | |
4f16cc83 | 1346 | && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)) |
1347 | && is_gimple_reg_type (TREE_TYPE (*tp)) | |
1348 | && ! VOID_TYPE_P (TREE_TYPE (*tp))) | |
182cf5a9 | 1349 | { |
8a62fbb0 | 1350 | if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE |
1351 | && useless_type_conversion_p (TREE_TYPE (*tp), | |
1352 | TREE_TYPE (TREE_TYPE (sym))) | |
1353 | && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1), | |
1354 | TYPE_SIZE_UNIT (TREE_TYPE (*tp)))) | |
1355 | { | |
1356 | *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym, | |
1357 | TYPE_SIZE (TREE_TYPE (*tp)), | |
1358 | int_const_binop (MULT_EXPR, | |
1359 | bitsize_int (BITS_PER_UNIT), | |
317e2a67 | 1360 | TREE_OPERAND (*tp, 1))); |
8a62fbb0 | 1361 | } |
8ba0d70d | 1362 | else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE |
1363 | && useless_type_conversion_p (TREE_TYPE (*tp), | |
1364 | TREE_TYPE (TREE_TYPE (sym)))) | |
1365 | { | |
1366 | *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1)) | |
1367 | ? REALPART_EXPR : IMAGPART_EXPR, | |
1368 | TREE_TYPE (*tp), sym); | |
1369 | } | |
4f16cc83 | 1370 | else if (integer_zerop (TREE_OPERAND (*tp, 1)) |
1371 | && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp))) | |
8a62fbb0 | 1372 | { |
1373 | if (!useless_type_conversion_p (TREE_TYPE (*tp), | |
1374 | TREE_TYPE (sym))) | |
1375 | *tp = build1 (VIEW_CONVERT_EXPR, | |
1376 | TREE_TYPE (*tp), sym); | |
1377 | else | |
1378 | *tp = sym; | |
1379 | } | |
4f16cc83 | 1380 | else if (DECL_SIZE (sym) |
1381 | && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST | |
90ca1268 | 1382 | && (known_subrange_p |
1383 | (mem_ref_offset (*tp), | |
1384 | wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))), | |
1385 | 0, wi::to_offset (DECL_SIZE_UNIT (sym)))) | |
4f16cc83 | 1386 | && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp)) |
1387 | || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))) | |
1388 | == TYPE_PRECISION (TREE_TYPE (*tp)))) | |
1389 | && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))), | |
1390 | BITS_PER_UNIT) == 0) | |
1391 | { | |
1392 | *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym, | |
1393 | TYPE_SIZE (TREE_TYPE (*tp)), | |
1394 | wide_int_to_tree (bitsizetype, | |
1395 | mem_ref_offset (*tp) | |
1396 | << LOG2_BITS_PER_UNIT)); | |
1397 | } | |
182cf5a9 | 1398 | } |
1399 | } | |
1400 | ||
cfe3cae3 | 1401 | /* For a tree REF return its base if it is the base of a MEM_REF |
1402 | that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */ | |
1403 | ||
1404 | static tree | |
1405 | non_rewritable_mem_ref_base (tree ref) | |
1406 | { | |
7345b977 | 1407 | tree base; |
cfe3cae3 | 1408 | |
1409 | /* A plain decl does not need it set. */ | |
1410 | if (DECL_P (ref)) | |
1411 | return NULL_TREE; | |
1412 | ||
7345b977 | 1413 | if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref)))) |
1414 | { | |
1415 | base = get_base_address (ref); | |
1416 | if (DECL_P (base)) | |
1417 | return base; | |
1418 | return NULL_TREE; | |
1419 | } | |
cfe3cae3 | 1420 | |
1421 | /* But watch out for MEM_REFs we cannot lower to a | |
8a62fbb0 | 1422 | VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */ |
cfe3cae3 | 1423 | if (TREE_CODE (base) == MEM_REF |
1424 | && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR) | |
1425 | { | |
1426 | tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0); | |
4f16cc83 | 1427 | if (! DECL_P (decl)) |
1428 | return NULL_TREE; | |
1429 | if (! is_gimple_reg_type (TREE_TYPE (base)) | |
606f008b | 1430 | || VOID_TYPE_P (TREE_TYPE (base)) |
1431 | || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)) | |
4f16cc83 | 1432 | return decl; |
8ba0d70d | 1433 | if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE |
1434 | || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE) | |
8a62fbb0 | 1435 | && useless_type_conversion_p (TREE_TYPE (base), |
1436 | TREE_TYPE (TREE_TYPE (decl))) | |
a5a5010f | 1437 | && known_ge (mem_ref_offset (base), 0) |
90ca1268 | 1438 | && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))), |
1439 | mem_ref_offset (base)) | |
8a62fbb0 | 1440 | && multiple_of_p (sizetype, TREE_OPERAND (base, 1), |
1441 | TYPE_SIZE_UNIT (TREE_TYPE (base)))) | |
1442 | return NULL_TREE; | |
4f16cc83 | 1443 | /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR. */ |
1444 | if (integer_zerop (TREE_OPERAND (base, 1)) | |
1445 | && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base))) | |
1446 | return NULL_TREE; | |
1447 | /* For integral typed extracts we can use a BIT_FIELD_REF. */ | |
1448 | if (DECL_SIZE (decl) | |
85d76a5d | 1449 | && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST |
90ca1268 | 1450 | && (known_subrange_p |
1451 | (mem_ref_offset (base), | |
1452 | wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))), | |
1453 | 0, wi::to_poly_offset (DECL_SIZE_UNIT (decl)))) | |
4f16cc83 | 1454 | /* ??? We can't handle bitfield precision extracts without |
1455 | either using an alternate type for the BIT_FIELD_REF and | |
1456 | then doing a conversion or possibly adjusting the offset | |
d0abd9e0 | 1457 | according to endianness. */ |
4f16cc83 | 1458 | && (! INTEGRAL_TYPE_P (TREE_TYPE (base)) |
1459 | || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))) | |
1460 | == TYPE_PRECISION (TREE_TYPE (base)))) | |
1461 | && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))), | |
1462 | BITS_PER_UNIT) == 0) | |
1463 | return NULL_TREE; | |
1464 | return decl; | |
cfe3cae3 | 1465 | } |
1466 | ||
1467 | return NULL_TREE; | |
1468 | } | |
1469 | ||
b88d109e | 1470 | /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form. |
1471 | Otherwise return true. */ | |
1472 | ||
1473 | static bool | |
1474 | non_rewritable_lvalue_p (tree lhs) | |
1475 | { | |
1476 | /* A plain decl is always rewritable. */ | |
1477 | if (DECL_P (lhs)) | |
1478 | return false; | |
1479 | ||
e3c6a1ed | 1480 | /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in |
1481 | a reasonably efficient manner... */ | |
1482 | if ((TREE_CODE (lhs) == REALPART_EXPR | |
1483 | || TREE_CODE (lhs) == IMAGPART_EXPR) | |
1484 | && DECL_P (TREE_OPERAND (lhs, 0))) | |
1485 | return false; | |
1486 | ||
2506d97a | 1487 | /* ??? The following could be relaxed allowing component |
ef618bb8 | 1488 | references that do not change the access size. */ |
b88d109e | 1489 | if (TREE_CODE (lhs) == MEM_REF |
2506d97a | 1490 | && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR) |
b88d109e | 1491 | { |
1492 | tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0); | |
2506d97a | 1493 | |
1494 | /* A decl that is wrapped inside a MEM-REF that covers | |
1495 | it full is also rewritable. */ | |
1496 | if (integer_zerop (TREE_OPERAND (lhs, 1)) | |
1497 | && DECL_P (decl) | |
b88d109e | 1498 | && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs)) |
f8689010 | 1499 | /* If the dynamic type of the decl has larger precision than |
1500 | the decl itself we can't use the decls type for SSA rewriting. */ | |
1501 | && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl)) | |
1502 | || compare_tree_int (DECL_SIZE (decl), | |
1503 | TYPE_PRECISION (TREE_TYPE (decl))) == 0) | |
1504 | || (INTEGRAL_TYPE_P (TREE_TYPE (lhs)) | |
1505 | && (TYPE_PRECISION (TREE_TYPE (decl)) | |
1506 | >= TYPE_PRECISION (TREE_TYPE (lhs))))) | |
306097e3 | 1507 | /* Make sure we are not re-writing non-float copying into float |
1508 | copying as that can incur normalization. */ | |
1509 | && (! FLOAT_TYPE_P (TREE_TYPE (decl)) | |
1510 | || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl))) | |
b88d109e | 1511 | && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs))) |
1512 | return false; | |
2506d97a | 1513 | |
1514 | /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable | |
1515 | using a BIT_INSERT_EXPR. */ | |
1516 | if (DECL_P (decl) | |
1517 | && VECTOR_TYPE_P (TREE_TYPE (decl)) | |
1518 | && TYPE_MODE (TREE_TYPE (decl)) != BLKmode | |
c671977d | 1519 | && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)), |
1520 | TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))), 0) | |
a5a5010f | 1521 | && known_ge (mem_ref_offset (lhs), 0) |
1522 | && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))), | |
1523 | mem_ref_offset (lhs)) | |
1524 | && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1), | |
1525 | TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) | |
2506d97a | 1526 | return false; |
b88d109e | 1527 | } |
1528 | ||
2506d97a | 1529 | /* A vector-insert using a BIT_FIELD_REF is rewritable using |
1530 | BIT_INSERT_EXPR. */ | |
1531 | if (TREE_CODE (lhs) == BIT_FIELD_REF | |
1532 | && DECL_P (TREE_OPERAND (lhs, 0)) | |
1533 | && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0))) | |
1534 | && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode | |
c671977d | 1535 | && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)), |
1536 | TYPE_SIZE_UNIT | |
1537 | (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0) | |
2506d97a | 1538 | && (tree_to_uhwi (TREE_OPERAND (lhs, 2)) |
1539 | % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0) | |
1540 | return false; | |
1541 | ||
b88d109e | 1542 | return true; |
1543 | } | |
1544 | ||
5ea2e42f | 1545 | /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and |
1546 | mark the variable VAR for conversion into SSA. Return true when updating | |
1547 | stmts is required. */ | |
cfe3cae3 | 1548 | |
e70e8b13 | 1549 | static void |
1550 | maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs, | |
1551 | bitmap suitable_for_renaming) | |
cfe3cae3 | 1552 | { |
cfe3cae3 | 1553 | /* Global Variables, result decls cannot be changed. */ |
1554 | if (is_global_var (var) | |
1555 | || TREE_CODE (var) == RESULT_DECL | |
1556 | || bitmap_bit_p (addresses_taken, DECL_UID (var))) | |
e70e8b13 | 1557 | return; |
11bbec13 | 1558 | |
cfe3cae3 | 1559 | if (TREE_ADDRESSABLE (var) |
1560 | /* Do not change TREE_ADDRESSABLE if we need to preserve var as | |
1561 | a non-register. Otherwise we are confused and forget to | |
1562 | add virtual operands for it. */ | |
1563 | && (!is_gimple_reg_type (TREE_TYPE (var)) | |
7e21e36b | 1564 | || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE |
1565 | || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE | |
cfe3cae3 | 1566 | || !bitmap_bit_p (not_reg_needs, DECL_UID (var)))) |
1567 | { | |
1568 | TREE_ADDRESSABLE (var) = 0; | |
26055527 | 1569 | /* If we cleared TREE_ADDRESSABLE make sure DECL_GIMPLE_REG_P |
1570 | is unset if we cannot rewrite the var into SSA. */ | |
1571 | if ((TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE | |
1572 | || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE) | |
1573 | && bitmap_bit_p (not_reg_needs, DECL_UID (var))) | |
1574 | DECL_GIMPLE_REG_P (var) = 0; | |
cfe3cae3 | 1575 | if (is_gimple_reg (var)) |
e70e8b13 | 1576 | bitmap_set_bit (suitable_for_renaming, DECL_UID (var)); |
cfe3cae3 | 1577 | if (dump_file) |
1578 | { | |
5ea2e42f | 1579 | fprintf (dump_file, "No longer having address taken: "); |
1ffa4346 | 1580 | print_generic_expr (dump_file, var); |
cfe3cae3 | 1581 | fprintf (dump_file, "\n"); |
1582 | } | |
1583 | } | |
5ea2e42f | 1584 | |
cfe3cae3 | 1585 | if (!DECL_GIMPLE_REG_P (var) |
1586 | && !bitmap_bit_p (not_reg_needs, DECL_UID (var)) | |
1587 | && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE | |
1588 | || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE) | |
1589 | && !TREE_THIS_VOLATILE (var) | |
53e9c5c4 | 1590 | && (!VAR_P (var) || !DECL_HARD_REGISTER (var))) |
cfe3cae3 | 1591 | { |
1592 | DECL_GIMPLE_REG_P (var) = 1; | |
e70e8b13 | 1593 | bitmap_set_bit (suitable_for_renaming, DECL_UID (var)); |
cfe3cae3 | 1594 | if (dump_file) |
1595 | { | |
5ea2e42f | 1596 | fprintf (dump_file, "Now a gimple register: "); |
1ffa4346 | 1597 | print_generic_expr (dump_file, var); |
cfe3cae3 | 1598 | fprintf (dump_file, "\n"); |
1599 | } | |
1600 | } | |
cfe3cae3 | 1601 | } |
1602 | ||
c51887c5 | 1603 | /* Return true when STMT is ASAN mark where second argument is an address |
1604 | of a local variable. */ | |
1605 | ||
1606 | static bool | |
1607 | is_asan_mark_p (gimple *stmt) | |
1608 | { | |
1609 | if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK)) | |
1610 | return false; | |
1611 | ||
1612 | tree addr = get_base_address (gimple_call_arg (stmt, 1)); | |
1613 | if (TREE_CODE (addr) == ADDR_EXPR | |
1614 | && VAR_P (TREE_OPERAND (addr, 0))) | |
1615 | { | |
1616 | tree var = TREE_OPERAND (addr, 0); | |
ba39c1d4 | 1617 | if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE, |
1618 | DECL_ATTRIBUTES (var))) | |
1619 | return false; | |
1620 | ||
c51887c5 | 1621 | unsigned addressable = TREE_ADDRESSABLE (var); |
1622 | TREE_ADDRESSABLE (var) = 0; | |
1623 | bool r = is_gimple_reg (var); | |
1624 | TREE_ADDRESSABLE (var) = addressable; | |
1625 | return r; | |
1626 | } | |
1627 | ||
1628 | return false; | |
1629 | } | |
1630 | ||
3f99bd94 | 1631 | /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */ |
83cfcdaf | 1632 | |
dd277d48 | 1633 | void |
5ea2e42f | 1634 | execute_update_addresses_taken (void) |
83cfcdaf | 1635 | { |
83cfcdaf | 1636 | basic_block bb; |
035def86 | 1637 | auto_bitmap addresses_taken; |
1638 | auto_bitmap not_reg_needs; | |
1639 | auto_bitmap suitable_for_renaming; | |
5ea2e42f | 1640 | tree var; |
cfe3cae3 | 1641 | unsigned i; |
83cfcdaf | 1642 | |
4b366dd3 | 1643 | timevar_push (TV_ADDRESS_TAKEN); |
1644 | ||
83cfcdaf | 1645 | /* Collect into ADDRESSES_TAKEN all variables whose address is taken within |
1646 | the function body. */ | |
fc00614f | 1647 | FOR_EACH_BB_FN (bb, cfun) |
83cfcdaf | 1648 | { |
1a91d914 | 1649 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); |
1650 | gsi_next (&gsi)) | |
83cfcdaf | 1651 | { |
42acab1c | 1652 | gimple *stmt = gsi_stmt (gsi); |
3f99bd94 | 1653 | enum gimple_code code = gimple_code (stmt); |
cfe3cae3 | 1654 | tree decl; |
6d5ec6f8 | 1655 | |
c51887c5 | 1656 | if (code == GIMPLE_CALL) |
5a5ef659 | 1657 | { |
c51887c5 | 1658 | if (optimize_atomic_compare_exchange_p (stmt)) |
1659 | { | |
1660 | /* For __atomic_compare_exchange_N if the second argument | |
1661 | is &var, don't mark var addressable; | |
1662 | if it becomes non-addressable, we'll rewrite it into | |
1663 | ATOMIC_COMPARE_EXCHANGE call. */ | |
1664 | tree arg = gimple_call_arg (stmt, 1); | |
1665 | gimple_call_set_arg (stmt, 1, null_pointer_node); | |
1666 | gimple_ior_addresses_taken (addresses_taken, stmt); | |
1667 | gimple_call_set_arg (stmt, 1, arg); | |
1668 | } | |
1b576300 | 1669 | else if (is_asan_mark_p (stmt) |
1670 | || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER)) | |
c51887c5 | 1671 | ; |
1672 | else | |
1673 | gimple_ior_addresses_taken (addresses_taken, stmt); | |
5a5ef659 | 1674 | } |
1675 | else | |
1676 | /* Note all addresses taken by the stmt. */ | |
1677 | gimple_ior_addresses_taken (addresses_taken, stmt); | |
6d5ec6f8 | 1678 | |
3f99bd94 | 1679 | /* If we have a call or an assignment, see if the lhs contains |
1680 | a local decl that requires not to be a gimple register. */ | |
1681 | if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) | |
1682 | { | |
d29f7fa8 | 1683 | tree lhs = gimple_get_lhs (stmt); |
b88d109e | 1684 | if (lhs |
1685 | && TREE_CODE (lhs) != SSA_NAME | |
d4d3da7e | 1686 | && ((code == GIMPLE_CALL && ! DECL_P (lhs)) |
1687 | || non_rewritable_lvalue_p (lhs))) | |
182cf5a9 | 1688 | { |
b88d109e | 1689 | decl = get_base_address (lhs); |
1690 | if (DECL_P (decl)) | |
1691 | bitmap_set_bit (not_reg_needs, DECL_UID (decl)); | |
182cf5a9 | 1692 | } |
1693 | } | |
1694 | ||
1695 | if (gimple_assign_single_p (stmt)) | |
1696 | { | |
1697 | tree rhs = gimple_assign_rhs1 (stmt); | |
cfe3cae3 | 1698 | if ((decl = non_rewritable_mem_ref_base (rhs))) |
1699 | bitmap_set_bit (not_reg_needs, DECL_UID (decl)); | |
1700 | } | |
d29f7fa8 | 1701 | |
cfe3cae3 | 1702 | else if (code == GIMPLE_CALL) |
1703 | { | |
1704 | for (i = 0; i < gimple_call_num_args (stmt); ++i) | |
182cf5a9 | 1705 | { |
cfe3cae3 | 1706 | tree arg = gimple_call_arg (stmt, i); |
1707 | if ((decl = non_rewritable_mem_ref_base (arg))) | |
1708 | bitmap_set_bit (not_reg_needs, DECL_UID (decl)); | |
1709 | } | |
1710 | } | |
1711 | ||
1712 | else if (code == GIMPLE_ASM) | |
1713 | { | |
1a91d914 | 1714 | gasm *asm_stmt = as_a <gasm *> (stmt); |
1715 | for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i) | |
cfe3cae3 | 1716 | { |
1a91d914 | 1717 | tree link = gimple_asm_output_op (asm_stmt, i); |
737f8866 | 1718 | tree lhs = TREE_VALUE (link); |
ef618bb8 | 1719 | if (TREE_CODE (lhs) != SSA_NAME) |
737f8866 | 1720 | { |
b88d109e | 1721 | decl = get_base_address (lhs); |
ef618bb8 | 1722 | if (DECL_P (decl) |
1723 | && (non_rewritable_lvalue_p (lhs) | |
1724 | /* We cannot move required conversions from | |
1725 | the lhs to the rhs in asm statements, so | |
1726 | require we do not need any. */ | |
1727 | || !useless_type_conversion_p | |
1728 | (TREE_TYPE (lhs), TREE_TYPE (decl)))) | |
b88d109e | 1729 | bitmap_set_bit (not_reg_needs, DECL_UID (decl)); |
737f8866 | 1730 | } |
cfe3cae3 | 1731 | } |
1a91d914 | 1732 | for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i) |
cfe3cae3 | 1733 | { |
1a91d914 | 1734 | tree link = gimple_asm_input_op (asm_stmt, i); |
cfe3cae3 | 1735 | if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link)))) |
1736 | bitmap_set_bit (not_reg_needs, DECL_UID (decl)); | |
1737 | } | |
3f99bd94 | 1738 | } |
83cfcdaf | 1739 | } |
75a70cf9 | 1740 | |
1a91d914 | 1741 | for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); |
1742 | gsi_next (&gsi)) | |
83cfcdaf | 1743 | { |
75a70cf9 | 1744 | size_t i; |
1a91d914 | 1745 | gphi *phi = gsi.phi (); |
75a70cf9 | 1746 | |
1747 | for (i = 0; i < gimple_phi_num_args (phi); i++) | |
83cfcdaf | 1748 | { |
1749 | tree op = PHI_ARG_DEF (phi, i), var; | |
1750 | if (TREE_CODE (op) == ADDR_EXPR | |
75a70cf9 | 1751 | && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL |
83cfcdaf | 1752 | && DECL_P (var)) |
1753 | bitmap_set_bit (addresses_taken, DECL_UID (var)); | |
1754 | } | |
1755 | } | |
1756 | } | |
1757 | ||
5ea2e42f | 1758 | /* We cannot iterate over all referenced vars because that can contain |
1759 | unused vars from BLOCK trees, which causes code generation differences | |
1760 | for -g vs. -g0. */ | |
1761 | for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var)) | |
e70e8b13 | 1762 | maybe_optimize_var (var, addresses_taken, not_reg_needs, |
1763 | suitable_for_renaming); | |
5ea2e42f | 1764 | |
f1f41a6c | 1765 | FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var) |
e70e8b13 | 1766 | maybe_optimize_var (var, addresses_taken, not_reg_needs, |
1767 | suitable_for_renaming); | |
83cfcdaf | 1768 | |
5ea2e42f | 1769 | /* Operand caches need to be recomputed for operands referencing the updated |
e70e8b13 | 1770 | variables and operands need to be rewritten to expose bare symbols. */ |
1771 | if (!bitmap_empty_p (suitable_for_renaming)) | |
dd277d48 | 1772 | { |
fc00614f | 1773 | FOR_EACH_BB_FN (bb, cfun) |
1a91d914 | 1774 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);) |
182cf5a9 | 1775 | { |
42acab1c | 1776 | gimple *stmt = gsi_stmt (gsi); |
dd277d48 | 1777 | |
182cf5a9 | 1778 | /* Re-write TARGET_MEM_REFs of symbols we want to |
1779 | rewrite into SSA form. */ | |
1780 | if (gimple_assign_single_p (stmt)) | |
1781 | { | |
1782 | tree lhs = gimple_assign_lhs (stmt); | |
1783 | tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt); | |
1784 | tree sym; | |
1785 | ||
e3c6a1ed | 1786 | /* Rewrite LHS IMAG/REALPART_EXPR similar to |
1787 | gimplify_modify_expr_complex_part. */ | |
1788 | if ((TREE_CODE (lhs) == IMAGPART_EXPR | |
1789 | || TREE_CODE (lhs) == REALPART_EXPR) | |
1790 | && DECL_P (TREE_OPERAND (lhs, 0)) | |
1791 | && bitmap_bit_p (suitable_for_renaming, | |
1792 | DECL_UID (TREE_OPERAND (lhs, 0)))) | |
1793 | { | |
1794 | tree other = make_ssa_name (TREE_TYPE (lhs)); | |
1795 | tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR | |
1796 | ? REALPART_EXPR : IMAGPART_EXPR, | |
1797 | TREE_TYPE (other), | |
1798 | TREE_OPERAND (lhs, 0)); | |
42acab1c | 1799 | gimple *load = gimple_build_assign (other, lrhs); |
08110213 | 1800 | location_t loc = gimple_location (stmt); |
1801 | gimple_set_location (load, loc); | |
e3c6a1ed | 1802 | gimple_set_vuse (load, gimple_vuse (stmt)); |
1803 | gsi_insert_before (&gsi, load, GSI_SAME_STMT); | |
1804 | gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0)); | |
1805 | gimple_assign_set_rhs_with_ops | |
1806 | (&gsi, COMPLEX_EXPR, | |
1807 | TREE_CODE (lhs) == IMAGPART_EXPR | |
1808 | ? other : gimple_assign_rhs1 (stmt), | |
1809 | TREE_CODE (lhs) == IMAGPART_EXPR | |
1810 | ? gimple_assign_rhs1 (stmt) : other, NULL_TREE); | |
1811 | stmt = gsi_stmt (gsi); | |
1812 | unlink_stmt_vdef (stmt); | |
1813 | update_stmt (stmt); | |
1814 | continue; | |
1815 | } | |
1816 | ||
2506d97a | 1817 | /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS |
1818 | into a BIT_INSERT_EXPR. */ | |
1819 | if (TREE_CODE (lhs) == BIT_FIELD_REF | |
1820 | && DECL_P (TREE_OPERAND (lhs, 0)) | |
1821 | && bitmap_bit_p (suitable_for_renaming, | |
1822 | DECL_UID (TREE_OPERAND (lhs, 0))) | |
1823 | && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0))) | |
1824 | && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode | |
c671977d | 1825 | && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)), |
1826 | TYPE_SIZE_UNIT (TREE_TYPE | |
1827 | (TREE_TYPE (TREE_OPERAND (lhs, 0)))), | |
1828 | 0) | |
2506d97a | 1829 | && (tree_to_uhwi (TREE_OPERAND (lhs, 2)) |
1830 | % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0)) | |
1831 | { | |
1832 | tree var = TREE_OPERAND (lhs, 0); | |
1833 | tree val = gimple_assign_rhs1 (stmt); | |
c671977d | 1834 | if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)), |
1835 | TREE_TYPE (val))) | |
1836 | { | |
1837 | tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var))); | |
1838 | gimple *pun | |
1839 | = gimple_build_assign (tem, | |
1840 | build1 (VIEW_CONVERT_EXPR, | |
1841 | TREE_TYPE (tem), val)); | |
1842 | gsi_insert_before (&gsi, pun, GSI_SAME_STMT); | |
1843 | val = tem; | |
1844 | } | |
2506d97a | 1845 | tree bitpos = TREE_OPERAND (lhs, 2); |
1846 | gimple_assign_set_lhs (stmt, var); | |
1847 | gimple_assign_set_rhs_with_ops | |
1848 | (&gsi, BIT_INSERT_EXPR, var, val, bitpos); | |
1849 | stmt = gsi_stmt (gsi); | |
1850 | unlink_stmt_vdef (stmt); | |
1851 | update_stmt (stmt); | |
1852 | continue; | |
1853 | } | |
1854 | ||
1855 | /* Rewrite a vector insert using a MEM_REF on the LHS | |
1856 | into a BIT_INSERT_EXPR. */ | |
1857 | if (TREE_CODE (lhs) == MEM_REF | |
1858 | && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR | |
1859 | && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0)) | |
1860 | && DECL_P (sym) | |
1861 | && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)) | |
1862 | && VECTOR_TYPE_P (TREE_TYPE (sym)) | |
1863 | && TYPE_MODE (TREE_TYPE (sym)) != BLKmode | |
c671977d | 1864 | && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)), |
1865 | TYPE_SIZE_UNIT | |
1866 | (TREE_TYPE (TREE_TYPE (sym))), 0) | |
2506d97a | 1867 | && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1)) |
1868 | && tree_int_cst_lt (TREE_OPERAND (lhs, 1), | |
1869 | TYPE_SIZE_UNIT (TREE_TYPE (sym))) | |
1870 | && (tree_to_uhwi (TREE_OPERAND (lhs, 1)) | |
1871 | % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0) | |
1872 | { | |
1873 | tree val = gimple_assign_rhs1 (stmt); | |
c671977d | 1874 | if (! types_compatible_p (TREE_TYPE (val), |
1875 | TREE_TYPE (TREE_TYPE (sym)))) | |
1876 | { | |
1877 | tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (sym))); | |
1878 | gimple *pun | |
1879 | = gimple_build_assign (tem, | |
1880 | build1 (VIEW_CONVERT_EXPR, | |
1881 | TREE_TYPE (tem), val)); | |
1882 | gsi_insert_before (&gsi, pun, GSI_SAME_STMT); | |
1883 | val = tem; | |
1884 | } | |
2506d97a | 1885 | tree bitpos |
1886 | = wide_int_to_tree (bitsizetype, | |
1887 | mem_ref_offset (lhs) * BITS_PER_UNIT); | |
1888 | gimple_assign_set_lhs (stmt, sym); | |
1889 | gimple_assign_set_rhs_with_ops | |
1890 | (&gsi, BIT_INSERT_EXPR, sym, val, bitpos); | |
1891 | stmt = gsi_stmt (gsi); | |
1892 | unlink_stmt_vdef (stmt); | |
1893 | update_stmt (stmt); | |
1894 | continue; | |
1895 | } | |
1896 | ||
182cf5a9 | 1897 | /* We shouldn't have any fancy wrapping of |
1898 | component-refs on the LHS, but look through | |
1899 | VIEW_CONVERT_EXPRs as that is easy. */ | |
1900 | while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR) | |
1901 | lhs = TREE_OPERAND (lhs, 0); | |
1902 | if (TREE_CODE (lhs) == MEM_REF | |
1903 | && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR | |
1904 | && integer_zerop (TREE_OPERAND (lhs, 1)) | |
1905 | && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0)) | |
1906 | && DECL_P (sym) | |
1907 | && !TREE_ADDRESSABLE (sym) | |
e70e8b13 | 1908 | && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))) |
182cf5a9 | 1909 | lhs = sym; |
1910 | else | |
1911 | lhs = gimple_assign_lhs (stmt); | |
1912 | ||
1913 | /* Rewrite the RHS and make sure the resulting assignment | |
1914 | is validly typed. */ | |
e70e8b13 | 1915 | maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming); |
182cf5a9 | 1916 | rhs = gimple_assign_rhs1 (stmt); |
1917 | if (gimple_assign_lhs (stmt) != lhs | |
1918 | && !useless_type_conversion_p (TREE_TYPE (lhs), | |
1919 | TREE_TYPE (rhs))) | |
7499ef4a | 1920 | { |
1921 | if (gimple_clobber_p (stmt)) | |
1922 | { | |
1923 | rhs = build_constructor (TREE_TYPE (lhs), NULL); | |
1924 | TREE_THIS_VOLATILE (rhs) = 1; | |
1925 | } | |
1926 | else | |
1927 | rhs = fold_build1 (VIEW_CONVERT_EXPR, | |
1928 | TREE_TYPE (lhs), rhs); | |
1929 | } | |
182cf5a9 | 1930 | if (gimple_assign_lhs (stmt) != lhs) |
1931 | gimple_assign_set_lhs (stmt, lhs); | |
1932 | ||
1933 | if (gimple_assign_rhs1 (stmt) != rhs) | |
1934 | { | |
1935 | gimple_stmt_iterator gsi = gsi_for_stmt (stmt); | |
1936 | gimple_assign_set_rhs_from_tree (&gsi, rhs); | |
1937 | } | |
1938 | } | |
1939 | ||
cfe3cae3 | 1940 | else if (gimple_code (stmt) == GIMPLE_CALL) |
1941 | { | |
1942 | unsigned i; | |
5a5ef659 | 1943 | if (optimize_atomic_compare_exchange_p (stmt)) |
1944 | { | |
1945 | tree expected = gimple_call_arg (stmt, 1); | |
1946 | if (bitmap_bit_p (suitable_for_renaming, | |
1947 | DECL_UID (TREE_OPERAND (expected, 0)))) | |
1948 | { | |
1949 | fold_builtin_atomic_compare_exchange (&gsi); | |
1950 | continue; | |
1951 | } | |
1952 | } | |
c51887c5 | 1953 | else if (is_asan_mark_p (stmt)) |
1954 | { | |
1955 | tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0); | |
1956 | if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var))) | |
1957 | { | |
1958 | unlink_stmt_vdef (stmt); | |
1959 | if (asan_mark_p (stmt, ASAN_MARK_POISON)) | |
1960 | { | |
1961 | gcall *call | |
1962 | = gimple_build_call_internal (IFN_ASAN_POISON, 0); | |
1963 | gimple_call_set_lhs (call, var); | |
1964 | gsi_replace (&gsi, call, GSI_SAME_STMT); | |
1965 | } | |
1966 | else | |
ba39c1d4 | 1967 | { |
1968 | /* In ASAN_MARK (UNPOISON, &b, ...) the variable | |
1969 | is uninitialized. Avoid dependencies on | |
1970 | previous out of scope value. */ | |
1971 | tree clobber | |
1972 | = build_constructor (TREE_TYPE (var), NULL); | |
1973 | TREE_THIS_VOLATILE (clobber) = 1; | |
1974 | gimple *g = gimple_build_assign (var, clobber); | |
1975 | gsi_replace (&gsi, g, GSI_SAME_STMT); | |
1976 | } | |
c51887c5 | 1977 | continue; |
1978 | } | |
1979 | } | |
1b576300 | 1980 | else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER)) |
1981 | for (i = 1; i < gimple_call_num_args (stmt); i++) | |
1982 | { | |
1983 | tree *argp = gimple_call_arg_ptr (stmt, i); | |
1984 | if (*argp == null_pointer_node) | |
1985 | continue; | |
1986 | gcc_assert (TREE_CODE (*argp) == ADDR_EXPR | |
1987 | && VAR_P (TREE_OPERAND (*argp, 0))); | |
1988 | tree var = TREE_OPERAND (*argp, 0); | |
1989 | if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var))) | |
1990 | *argp = null_pointer_node; | |
1991 | } | |
cfe3cae3 | 1992 | for (i = 0; i < gimple_call_num_args (stmt); ++i) |
1993 | { | |
1994 | tree *argp = gimple_call_arg_ptr (stmt, i); | |
e70e8b13 | 1995 | maybe_rewrite_mem_ref_base (argp, suitable_for_renaming); |
cfe3cae3 | 1996 | } |
1997 | } | |
1998 | ||
1999 | else if (gimple_code (stmt) == GIMPLE_ASM) | |
182cf5a9 | 2000 | { |
1a91d914 | 2001 | gasm *asm_stmt = as_a <gasm *> (stmt); |
182cf5a9 | 2002 | unsigned i; |
1a91d914 | 2003 | for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i) |
182cf5a9 | 2004 | { |
1a91d914 | 2005 | tree link = gimple_asm_output_op (asm_stmt, i); |
e70e8b13 | 2006 | maybe_rewrite_mem_ref_base (&TREE_VALUE (link), |
2007 | suitable_for_renaming); | |
182cf5a9 | 2008 | } |
1a91d914 | 2009 | for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i) |
182cf5a9 | 2010 | { |
1a91d914 | 2011 | tree link = gimple_asm_input_op (asm_stmt, i); |
e70e8b13 | 2012 | maybe_rewrite_mem_ref_base (&TREE_VALUE (link), |
2013 | suitable_for_renaming); | |
182cf5a9 | 2014 | } |
2015 | } | |
2016 | ||
fd2b4bf0 | 2017 | else if (gimple_debug_bind_p (stmt) |
2018 | && gimple_debug_bind_has_value_p (stmt)) | |
2019 | { | |
2020 | tree *valuep = gimple_debug_bind_get_value_ptr (stmt); | |
2021 | tree decl; | |
e70e8b13 | 2022 | maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming); |
fd2b4bf0 | 2023 | decl = non_rewritable_mem_ref_base (*valuep); |
e70e8b13 | 2024 | if (decl |
2025 | && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl))) | |
fd2b4bf0 | 2026 | gimple_debug_bind_reset_value (stmt); |
2027 | } | |
2028 | ||
182cf5a9 | 2029 | if (gimple_references_memory_p (stmt) |
2030 | || is_gimple_debug (stmt)) | |
2031 | update_stmt (stmt); | |
9b57c1ed | 2032 | |
2033 | gsi_next (&gsi); | |
182cf5a9 | 2034 | } |
dd277d48 | 2035 | |
2036 | /* Update SSA form here, we are called as non-pass as well. */ | |
41f75a99 | 2037 | if (number_of_loops (cfun) > 1 |
2038 | && loops_state_satisfies_p (LOOP_CLOSED_SSA)) | |
0ab72b53 | 2039 | rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa); |
2040 | else | |
2041 | update_ssa (TODO_update_ssa); | |
dd277d48 | 2042 | } |
83cfcdaf | 2043 | |
4b366dd3 | 2044 | timevar_pop (TV_ADDRESS_TAKEN); |
83cfcdaf | 2045 | } |
2046 | ||
namespace {

/* Pass descriptor for the "addressables" utility pass.  The pass has no
   gate or execute method of its own: all of its work is requested via
   the TODO_update_address_taken finish flag, which triggers
   execute_update_addresses_taken from the pass manager.  */
const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* Trivial gimple pass wrapper around the descriptor above; it inherits
   the default gate/execute behavior from gimple_opt_pass.  */
class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
2074 | ||
2075 | gimple_opt_pass * | |
2076 | make_pass_update_address_taken (gcc::context *ctxt) | |
2077 | { | |
2078 | return new pass_update_address_taken (ctxt); | |
2079 | } |