]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* SSA operands management for trees. |
7cf0dbf3 | 2 | Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 |
75a70cf9 | 3 | Free Software Foundation, Inc. |
4ee9c684 | 4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 9 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
26 | #include "flags.h" | |
27 | #include "function.h" | |
ce084dfc | 28 | #include "tree-pretty-print.h" |
29 | #include "gimple-pretty-print.h" | |
4ee9c684 | 30 | #include "tree-flow.h" |
31 | #include "tree-inline.h" | |
32 | #include "tree-pass.h" | |
33 | #include "ggc.h" | |
34 | #include "timevar.h" | |
acc70efa | 35 | #include "langhooks.h" |
85f3d834 | 36 | #include "diagnostic-core.h" |
37 | ||
5b110d39 | 38 | |
48e1416a | 39 | /* This file contains the code required to manage the operands cache of the |
40 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt | |
41 | annotation. This cache contains operands that will be of interest to | |
42 | optimizers and other passes wishing to manipulate the IL. | |
5b110d39 | 43 | |
48e1416a | 44 | The operand type are broken up into REAL and VIRTUAL operands. The real |
45 | operands are represented as pointers into the stmt's operand tree. Thus | |
5b110d39 | 46 | any manipulation of the real operands will be reflected in the actual tree. |
48e1416a | 47 | Virtual operands are represented solely in the cache, although the base |
48 | variable for the SSA_NAME may, or may not occur in the stmt's tree. | |
5b110d39 | 49 | Manipulation of the virtual operands will not be reflected in the stmt tree. |
50 | ||
48e1416a | 51 | The routines in this file are concerned with creating this operand cache |
5b110d39 | 52 | from a stmt tree. |
53 | ||
48e1416a | 54 | The operand tree is the parsed by the various get_* routines which look |
55 | through the stmt tree for the occurrence of operands which may be of | |
56 | interest, and calls are made to the append_* routines whenever one is | |
57 | found. There are 4 of these routines, each representing one of the | |
4fb5e5ca | 58 | 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. |
5b110d39 | 59 | |
48e1416a | 60 | The append_* routines check for duplication, and simply keep a list of |
5b110d39 | 61 | unique objects for each operand type in the build_* extendable vectors. |
62 | ||
48e1416a | 63 | Once the stmt tree is completely parsed, the finalize_ssa_operands() |
64 | routine is called, which proceeds to perform the finalization routine | |
4fb5e5ca | 65 | on each of the 4 operand vectors which have been built up. |
5b110d39 | 66 | |
48e1416a | 67 | If the stmt had a previous operand cache, the finalization routines |
68 | attempt to match up the new operands with the old ones. If it's a perfect | |
69 | match, the old vector is simply reused. If it isn't a perfect match, then | |
70 | a new vector is created and the new operands are placed there. For | |
71 | virtual operands, if the previous cache had SSA_NAME version of a | |
72 | variable, and that same variable occurs in the same operands cache, then | |
5b110d39 | 73 | the new cache vector will also get the same SSA_NAME. |
74 | ||
4ec25329 | 75 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new |
76 | operand vector for VUSE, then the new vector will also be modified | |
77 | such that it contains 'a_5' rather than 'a'. */ | |
5b110d39 | 78 | |
4fb5e5ca | 79 | /* Structure storing statistics on how many call clobbers we have, and |
80 | how many where avoided. */ | |
81 | ||
static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;  /* Dumped by fini_ssa_operands when TDF_STATS is set.  */
106 | ||
107 | ||
/* Flags to describe operand properties in helpers.  These are OR'ed
   together into the FLAGS argument of get_expr_operands and friends.  */

/* By default, operands are loaded.  */
#define opf_use	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)
135 | ||
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Forward declaration; the definition appears later in this file.  */
static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
5b110d39 | 156 | |
7063afc3 | 157 | /* Return the DECL_UID of the base variable of T. */ |
5b110d39 | 158 | |
b66731e8 | 159 | static inline unsigned |
7ecb5bb2 | 160 | get_name_decl (const_tree t) |
4ee9c684 | 161 | { |
ed542b9f | 162 | if (TREE_CODE (t) != SSA_NAME) |
163 | return DECL_UID (t); | |
164 | else | |
165 | return DECL_UID (SSA_NAME_VAR (t)); | |
4ee9c684 | 166 | } |
167 | ||
fa999566 | 168 | |
f6255040 | 169 | /* Return true if the SSA operands cache is active. */ |
5b110d39 | 170 | |
b66731e8 | 171 | bool |
172 | ssa_operands_active (void) | |
4ee9c684 | 173 | { |
75a70cf9 | 174 | /* This function may be invoked from contexts where CFUN is NULL |
175 | (IPA passes), return false for now. FIXME: operands may be | |
176 | active in each individual function, maybe this function should | |
177 | take CFUN as a parameter. */ | |
178 | if (cfun == NULL) | |
179 | return false; | |
180 | ||
fcbe34ba | 181 | return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active; |
b66731e8 | 182 | } |
4ee9c684 | 183 | |
48e1416a | 184 | |
dd277d48 | 185 | /* Create the VOP variable, an artificial global variable to act as a |
186 | representative of all of the virtual operands FUD chain. */ | |
fa999566 | 187 | |
static void
create_vop_var (void)
{
  tree global_var;

  /* The VOP must not have been created for this function yet.  */
  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  /* Build an artificial VAR_DECL named ".MEM" of void type.  */
  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  /* Compiler-created, not from user source.  */
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  /* External and static so it behaves like a global symbol.  */
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  /* Register the new decl with the function and record it as the VOP.  */
  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}
dadb7503 | 211 | |
dd277d48 | 212 | /* These are the sizes of the operand memory buffer in bytes which gets |
213 | allocated each time more operands space is required. The final value is | |
214 | the amount that is allocated every time after that. | |
215 | In 1k we can fit 25 use operands (or 63 def operands) on a host with | |
216 | 8 byte pointers, that would be 10 statements each with 1 def and 2 | |
217 | uses. */ | |
48e1416a | 218 | |
#define OP_SIZE_INIT 0	/* Zero forces a fresh chunk on the first request.  */
#define OP_SIZE_1 (1024 - sizeof (void *))	/* ~1kb, minus chain pointer.  */
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))	/* ~4kb.  */
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))	/* ~16kb; final size.  */
dadb7503 | 223 | |
b66731e8 | 224 | /* Initialize the operand cache routines. */ |
225 | ||
void
init_ssa_operands (void)
{
  /* The build vectors and the bitmap obstack are shared by all
     functions; allocate them only on the first initialization.  */
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  /* Setting the index to the current chunk size makes the first call to
     ssa_operand_alloc allocate a fresh chunk (see the >= test there).  */
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}
4ee9c684 | 246 | |
5b110d39 | 247 | |
b66731e8 | 248 | /* Dispose of anything required by the operand routines. */ |
249 | ||
250 | void | |
251 | fini_ssa_operands (void) | |
252 | { | |
253 | struct ssa_operand_memory_d *ptr; | |
4fb5e5ca | 254 | |
fcbe34ba | 255 | if (!--n_initialized) |
256 | { | |
257 | VEC_free (tree, heap, build_defs); | |
258 | VEC_free (tree, heap, build_uses); | |
dd277d48 | 259 | build_vdef = NULL_TREE; |
260 | build_vuse = NULL_TREE; | |
fcbe34ba | 261 | } |
4fb5e5ca | 262 | |
fcbe34ba | 263 | gimple_ssa_operands (cfun)->free_defs = NULL; |
264 | gimple_ssa_operands (cfun)->free_uses = NULL; | |
4fb5e5ca | 265 | |
fcbe34ba | 266 | while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL) |
b66731e8 | 267 | { |
fcbe34ba | 268 | gimple_ssa_operands (cfun)->operand_memory |
269 | = gimple_ssa_operands (cfun)->operand_memory->next; | |
b66731e8 | 270 | ggc_free (ptr); |
5b110d39 | 271 | } |
272 | ||
fcbe34ba | 273 | gimple_ssa_operands (cfun)->ops_active = false; |
4fb5e5ca | 274 | |
363d040e | 275 | if (!n_initialized) |
276 | bitmap_obstack_release (&operands_bitmap_obstack); | |
75a70cf9 | 277 | |
dd277d48 | 278 | cfun->gimple_df->vop = NULL_TREE; |
279 | ||
7bbb6ff8 | 280 | if (dump_file && (dump_flags & TDF_STATS)) |
281 | { | |
4fb5e5ca | 282 | fprintf (dump_file, "Original clobbered vars: %d\n", |
fa999566 | 283 | clobber_stats.clobbered_vars); |
4fb5e5ca | 284 | fprintf (dump_file, "Static write clobbers avoided: %d\n", |
fa999566 | 285 | clobber_stats.static_write_clobbers_avoided); |
4fb5e5ca | 286 | fprintf (dump_file, "Static read clobbers avoided: %d\n", |
fa999566 | 287 | clobber_stats.static_read_clobbers_avoided); |
4fb5e5ca | 288 | fprintf (dump_file, "Unescapable clobbers avoided: %d\n", |
fa999566 | 289 | clobber_stats.unescapable_clobbers_avoided); |
4fb5e5ca | 290 | fprintf (dump_file, "Original read-only clobbers: %d\n", |
fa999566 | 291 | clobber_stats.readonly_clobbers); |
4fb5e5ca | 292 | fprintf (dump_file, "Static read-only clobbers avoided: %d\n", |
fa999566 | 293 | clobber_stats.static_readonly_clobbers_avoided); |
7bbb6ff8 | 294 | } |
b66731e8 | 295 | } |
5b110d39 | 296 | |
4ee9c684 | 297 | |
dd277d48 | 298 | /* Return memory for an operand of size SIZE. */ |
48e1416a | 299 | |
b66731e8 | 300 | static inline void * |
301 | ssa_operand_alloc (unsigned size) | |
302 | { | |
303 | char *ptr; | |
4fb5e5ca | 304 | |
dd277d48 | 305 | gcc_assert (size == sizeof (struct use_optype_d) |
306 | || size == sizeof (struct def_optype_d)); | |
307 | ||
fcbe34ba | 308 | if (gimple_ssa_operands (cfun)->operand_memory_index + size |
363d040e | 309 | >= gimple_ssa_operands (cfun)->ssa_operand_mem_size) |
b66731e8 | 310 | { |
311 | struct ssa_operand_memory_d *ptr; | |
dadb7503 | 312 | |
dd277d48 | 313 | switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size) |
314 | { | |
315 | case OP_SIZE_INIT: | |
316 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1; | |
317 | break; | |
318 | case OP_SIZE_1: | |
319 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2; | |
320 | break; | |
321 | case OP_SIZE_2: | |
322 | case OP_SIZE_3: | |
323 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3; | |
324 | break; | |
325 | default: | |
326 | gcc_unreachable (); | |
327 | } | |
dadb7503 | 328 | |
ba72912a | 329 | |
330 | ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *) | |
331 | + gimple_ssa_operands (cfun)->ssa_operand_mem_size); | |
332 | ||
fcbe34ba | 333 | ptr->next = gimple_ssa_operands (cfun)->operand_memory; |
334 | gimple_ssa_operands (cfun)->operand_memory = ptr; | |
335 | gimple_ssa_operands (cfun)->operand_memory_index = 0; | |
b66731e8 | 336 | } |
dd277d48 | 337 | |
fcbe34ba | 338 | ptr = &(gimple_ssa_operands (cfun)->operand_memory |
339 | ->mem[gimple_ssa_operands (cfun)->operand_memory_index]); | |
340 | gimple_ssa_operands (cfun)->operand_memory_index += size; | |
b66731e8 | 341 | return ptr; |
4ee9c684 | 342 | } |
343 | ||
5b110d39 | 344 | |
dadb7503 | 345 | /* Allocate a DEF operand. */ |
346 | ||
4fb5e5ca | 347 | static inline struct def_optype_d * |
348 | alloc_def (void) | |
349 | { | |
350 | struct def_optype_d *ret; | |
351 | if (gimple_ssa_operands (cfun)->free_defs) | |
352 | { | |
353 | ret = gimple_ssa_operands (cfun)->free_defs; | |
354 | gimple_ssa_operands (cfun)->free_defs | |
355 | = gimple_ssa_operands (cfun)->free_defs->next; | |
356 | } | |
357 | else | |
358 | ret = (struct def_optype_d *) | |
dadb7503 | 359 | ssa_operand_alloc (sizeof (struct def_optype_d)); |
4fb5e5ca | 360 | return ret; |
361 | } | |
362 | ||
363 | ||
dadb7503 | 364 | /* Allocate a USE operand. */ |
365 | ||
4fb5e5ca | 366 | static inline struct use_optype_d * |
367 | alloc_use (void) | |
368 | { | |
369 | struct use_optype_d *ret; | |
370 | if (gimple_ssa_operands (cfun)->free_uses) | |
371 | { | |
372 | ret = gimple_ssa_operands (cfun)->free_uses; | |
373 | gimple_ssa_operands (cfun)->free_uses | |
374 | = gimple_ssa_operands (cfun)->free_uses->next; | |
375 | } | |
376 | else | |
dadb7503 | 377 | ret = (struct use_optype_d *) |
378 | ssa_operand_alloc (sizeof (struct use_optype_d)); | |
4fb5e5ca | 379 | return ret; |
380 | } | |
381 | ||
382 | ||
dadb7503 | 383 | /* Adds OP to the list of defs after LAST. */ |
fd12afe9 | 384 | |
48e1416a | 385 | static inline def_optype_p |
dadb7503 | 386 | add_def_op (tree *op, def_optype_p last) |
b5b59dda | 387 | { |
f0d6e81c | 388 | def_optype_p new_def; |
b5b59dda | 389 | |
f0d6e81c | 390 | new_def = alloc_def (); |
391 | DEF_OP_PTR (new_def) = op; | |
392 | last->next = new_def; | |
393 | new_def->next = NULL; | |
394 | return new_def; | |
b5b59dda | 395 | } |
396 | ||
dadb7503 | 397 | |
398 | /* Adds OP to the list of uses of statement STMT after LAST. */ | |
b5b59dda | 399 | |
4fb5e5ca | 400 | static inline use_optype_p |
75a70cf9 | 401 | add_use_op (gimple stmt, tree *op, use_optype_p last) |
b5b59dda | 402 | { |
f0d6e81c | 403 | use_optype_p new_use; |
404 | ||
405 | new_use = alloc_use (); | |
406 | USE_OP_PTR (new_use)->use = op; | |
407 | link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); | |
408 | last->next = new_use; | |
409 | new_use->next = NULL; | |
410 | return new_use; | |
b5b59dda | 411 | } |
412 | ||
b5b59dda | 413 | |
b5b59dda | 414 | |
b5b59dda | 415 | /* Takes elements from build_defs and turns them into def operands of STMT. |
dadb7503 | 416 | TODO -- Make build_defs VEC of tree *. */ |
b5b59dda | 417 | |
static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the base variable if the old VDEF is already
	 an SSA name.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
      /* Store the address of the stmt's vdef slot at index 0; the
	 vector holds tree* values cast to tree.  */
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed; reuse the
     old operand list untouched.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}
b66731e8 | 485 | |
4ee9c684 | 486 | |
b5b59dda | 487 | /* Takes elements from build_uses and turns them into use operands of STMT. |
09aca5bc | 488 | TODO -- Make build_uses VEC of tree *. */ |
b5b59dda | 489 | |
490 | static inline void | |
75a70cf9 | 491 | finalize_ssa_uses (gimple stmt) |
b5b59dda | 492 | { |
493 | unsigned new_i; | |
494 | struct use_optype_d new_list; | |
495 | use_optype_p old_ops, ptr, last; | |
b5b59dda | 496 | |
dd277d48 | 497 | /* Pre-pend the VUSE we may have built. */ |
498 | if (build_vuse != NULL_TREE) | |
499 | { | |
500 | tree oldvuse = gimple_vuse (stmt); | |
501 | if (oldvuse | |
502 | && TREE_CODE (oldvuse) == SSA_NAME) | |
503 | oldvuse = SSA_NAME_VAR (oldvuse); | |
504 | if (oldvuse != (build_vuse != NULL_TREE | |
505 | ? build_vuse : build_vdef)) | |
506 | gimple_set_vuse (stmt, NULL_TREE); | |
507 | VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt)); | |
508 | } | |
509 | ||
b5b59dda | 510 | new_list.next = NULL; |
511 | last = &new_list; | |
512 | ||
75a70cf9 | 513 | old_ops = gimple_use_ops (stmt); |
b5b59dda | 514 | |
dd277d48 | 515 | /* Clear a no longer necessary VUSE. */ |
516 | if (build_vuse == NULL_TREE | |
517 | && gimple_vuse (stmt) != NULL_TREE) | |
518 | gimple_set_vuse (stmt, NULL_TREE); | |
519 | ||
b5b59dda | 520 | /* If there is anything in the old list, free it. */ |
521 | if (old_ops) | |
522 | { | |
523 | for (ptr = old_ops; ptr; ptr = ptr->next) | |
524 | delink_imm_use (USE_OP_PTR (ptr)); | |
fcbe34ba | 525 | old_ops->next = gimple_ssa_operands (cfun)->free_uses; |
526 | gimple_ssa_operands (cfun)->free_uses = old_ops; | |
b5b59dda | 527 | } |
528 | ||
dd277d48 | 529 | /* If we added a VUSE, make sure to set the operand if it is not already |
530 | present and mark it for renaming. */ | |
531 | if (build_vuse != NULL_TREE | |
532 | && gimple_vuse (stmt) == NULL_TREE) | |
533 | { | |
534 | gimple_set_vuse (stmt, gimple_vop (cfun)); | |
535 | mark_sym_for_renaming (gimple_vop (cfun)); | |
536 | } | |
537 | ||
09aca5bc | 538 | /* Now create nodes for all the new nodes. */ |
539 | for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++) | |
48e1416a | 540 | last = add_use_op (stmt, |
541 | (tree *) VEC_index (tree, build_uses, new_i), | |
dadb7503 | 542 | last); |
09aca5bc | 543 | |
b5b59dda | 544 | /* Now set the stmt's operands. */ |
75a70cf9 | 545 | gimple_set_use_ops (stmt, new_list.next); |
4ee9c684 | 546 | } |
5b110d39 | 547 | |
4fb5e5ca | 548 | |
549 | /* Clear the in_list bits and empty the build array for VDEFs and | |
550 | VUSEs. */ | |
b5b59dda | 551 | |
552 | static inline void | |
4fb5e5ca | 553 | cleanup_build_arrays (void) |
b5b59dda | 554 | { |
dd277d48 | 555 | build_vdef = NULL_TREE; |
556 | build_vuse = NULL_TREE; | |
4fb5e5ca | 557 | VEC_truncate (tree, build_defs, 0); |
558 | VEC_truncate (tree, build_uses, 0); | |
2cf24776 | 559 | } |
560 | ||
4ee9c684 | 561 | |
5b110d39 | 562 | /* Finalize all the build vectors, fill the new ones into INFO. */ |
48e1416a | 563 | |
static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  /* Finalize defs and uses from the build vectors, then reset the
     vectors for the next statement.  */
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}
571 | ||
572 | ||
5b110d39 | 573 | /* Start the process of building up operands vectors in INFO. */ |
574 | ||
static inline void
start_ssa_stmt_operands (void)
{
  /* The build state must be empty, i.e. the previous statement's
     finalization ran cleanup_build_arrays.  */
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}
583 | ||
584 | ||
5b110d39 | 585 | /* Add DEF_P to the list of pointers to operands. */ |
4ee9c684 | 586 | |
static inline void
append_def (tree *def_p)
{
  /* The vector stores pointers-to-operand, cast to tree for storage.  */
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
592 | ||
593 | ||
5b110d39 | 594 | /* Add USE_P to the list of pointers to operands. */ |
4ee9c684 | 595 | |
static inline void
append_use (tree *use_p)
{
  /* The vector stores pointers-to-operand, cast to tree for storage.  */
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
601 | ||
602 | ||
4fb5e5ca | 603 | /* Add VAR to the set of variables that require a VDEF operator. */ |
4ee9c684 | 604 | |
static inline void
append_vdef (tree var)
{
  /* Virtual operands are not built when not optimizing.  */
  if (!optimize)
    return;

  /* At most one VDEF/VUSE pair may be built per statement, and both
     must refer to the same variable.  */
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  /* A VDEF is recorded as both the built VDEF and the built VUSE.  */
  build_vdef = var;
  build_vuse = var;
}
619 | ||
620 | ||
4fb5e5ca | 621 | /* Add VAR to the set of variables that require a VUSE operator. */ |
4ee9c684 | 622 | |
static inline void
append_vuse (tree var)
{
  /* Virtual operands are not built when not optimizing.  */
  if (!optimize)
    return;

  /* At most one VUSE may be built per statement.  */
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}
634 | ||
dd277d48 | 635 | /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ |
f0e6e3c1 | 636 | |
static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  /* Debug statements must never get virtual operands.  */
  gcc_assert (!is_gimple_debug (stmt));

  /* The single VOP decl (see create_vop_var) represents all memory.  */
  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}
653 | ||
b66731e8 | 654 | |
75a70cf9 | 655 | /* Add *VAR_P to the appropriate operand array for statement STMT. |
656 | FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, | |
657 | it will be added to the statement's real operands, otherwise it is | |
658 | added to virtual operands. */ | |
fa999566 | 659 | |
static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  /* SYM is the underlying symbol: the base variable of an SSA_NAME, or
     VAR itself.  */
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands, unless virtual operands
     were suppressed for this reference.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    /* Memory variables get virtual operands instead.  */
    add_virtual_operand (stmt, flags);
}
b66731e8 | 686 | |
6d5ec6f8 | 687 | /* Mark the base address of REF as having its address taken. |
688 | REF may be a single variable whose address has been taken or any | |
689 | other valid GIMPLE memory reference (structure reference, array, | |
690 | etc). */ | |
b66731e8 | 691 | |
static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      /* For MEM[&decl, off], the address of DECL is taken as well.  */
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
713 | ||
4ec25329 | 714 | |
5d9de213 | 715 | /* A subroutine of get_expr_operands to handle MEM_REF. |
cb7f680b | 716 | |
182cf5a9 | 717 | STMT is the statement being processed, EXPR is the MEM_REF |
cb7f680b | 718 | that got us here. |
48e1416a | 719 | |
cb7f680b | 720 | FLAGS is as in get_expr_operands. |
721 | ||
cb7f680b | 722 | RECURSE_ON_BASE should be set to true if we want to continue |
723 | calling get_expr_operands on the base pointer, and false if | |
724 | something else will do it for us. */ | |
725 | ||
static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  /* Volatile references mark the statement, unless virtual operands
     were suppressed.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  The pointer
     appearing here does not by itself make its target addressable,
     hence opf_non_addressable.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
		       opf_non_addressable | opf_use
		       | (flags & (opf_no_vops|opf_not_non_addressable)));
}
a002e999 | 745 | |
4ec25329 | 746 | |
fa999566 | 747 | /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ |
4ee9c684 | 748 | |
static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  /* Volatile references mark the statement, unless virtual operands
     were suppressed.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands: base, index and index2, all read
     as uses; only opf_no_vops is propagated from FLAGS.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  /* The memory reference itself gets a virtual operand.  */
  add_virtual_operand (stmt, flags);
}
763 | ||
764 | ||
75a70cf9 | 765 | /* If STMT is a call that may clobber globals and other symbols that |
766 | escape, add them to the VDEF/VUSE lists for it. */ | |
fa999566 | 767 | |
static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      /* Pure (but not const) calls still read memory: add a VUSE.  */
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
787 | ||
788 | ||
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  Outputs are
   scanned as definitions and inputs as uses; operands whose constraints
   only allow memory additionally have their address marked as taken,
   and a "memory" clobber turns the asm into a full memory barrier.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  /* Output constraints are collected so that matching input constraints
     (e.g. "0") can be resolved against them below.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}
842 | ||
843 | ||
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  This is the workhorse of operand
   scanning: it dispatches on the tree code of *EXPR_P, records real
   (register) operands via add_stmt_operand, records virtual operands
   for memory references, and propagates volatility to STMT.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  /* Sub-expressions that are mere uses (array indices, condition
     operands, ...) are scanned with these flags.  */
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* Debug stmts must never create virtual operands; keep the caller's
     opf_no_vops when recursing for them.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* Bare decls become real or virtual operands depending on
	 whether they are gimple registers; add_stmt_operand decides.  */
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      /* Only valid as the value of a debug bind; contributes nothing.  */
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Handles references: scan the base with the inherited flags
	   and the indices/offsets as plain uses.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    /* A volatile FIELD_DECL also makes the statement volatile.  */
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    /* Index, lower bound and element size are rvalue uses.  */
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      /* All three operands are rvalue uses.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	/* Ternary arithmetic codes: scan all three operands.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
1037 | ||
a002e999 | 1038 | |
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.
   Dispatches on the GIMPLE statement code; note the deliberate
   FALLTHRUs: a call scans its LHS and arguments exactly like an
   assignment, and both fall into the generic operand loop.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  /* START is the first gimple operand scanned as a use; ops before it
     (the LHS) have already been scanned as definitions.  */
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      /* Asms need constraint-aware scanning; handled separately.  */
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      /* Debug binds scan their value without creating virtual ops.  */
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return always uses the current virtual operand.  */
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 is the LHS; scan it as a definition, then fall
	 through to scan the remaining operands as uses.  */
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
1088 | ||
a002e999 | 1089 | |
/* Create an operands cache for STMT.  The three steps below are
   order-dependent: the build arrays must be initialized before parsing
   fills them, and finalization copies them into STMT's operand lists.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands;
     parsing will set the flag again if any operand is volatile.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}
39b644e9 | 1102 | |
/* Verifies SSA statement operands.  Re-parses STMT into the build_*
   arrays (without finalizing) and cross-checks them against the operand
   cache currently attached to STMT.  Returns TRUE and emits an error if
   any mismatch is found, FALSE if the cache is up to date.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  /* Remember the flag; re-parsing below recomputes it, and the two
     values are compared at the end.  */
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  /* Match each cached use against the freshly-built list, clearing
     matched entries so leftovers indicate missing operands.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (tree, build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      VEC_replace (tree, build_uses, i, NULL_TREE);
	      break;
	    }
	}
      if (i == VEC_length (tree, build_uses))
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (tree, build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  /* Same matching scheme for the definitions.  */
  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
    {
      FOR_EACH_VEC_ELT (tree, build_defs, i, def)
	{
	  if (def_p == (tree *)def)
	    {
	      VEC_replace (tree, build_defs, i, NULL_TREE);
	      break;
	    }
	}
      if (i == VEC_length (tree, build_defs))
	{
	  error ("excess def operand for stmt");
	  debug_generic_expr (DEF_FROM_PTR (def_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (tree, build_defs, i, def)
    if (def != NULL_TREE)
      {
	error ("def operand missing for stmt");
	debug_generic_expr (*(tree *)def);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
1214 | ||
4ec25329 | 1215 | |
28c92cbb | 1216 | /* Releases the operands of STMT back to their freelists, and clears |
1217 | the stmt operand lists. */ | |
1218 | ||
1219 | void | |
75a70cf9 | 1220 | free_stmt_operands (gimple stmt) |
28c92cbb | 1221 | { |
75a70cf9 | 1222 | def_optype_p defs = gimple_def_ops (stmt), last_def; |
1223 | use_optype_p uses = gimple_use_ops (stmt), last_use; | |
28c92cbb | 1224 | |
1225 | if (defs) | |
1226 | { | |
1227 | for (last_def = defs; last_def->next; last_def = last_def->next) | |
1228 | continue; | |
1229 | last_def->next = gimple_ssa_operands (cfun)->free_defs; | |
1230 | gimple_ssa_operands (cfun)->free_defs = defs; | |
75a70cf9 | 1231 | gimple_set_def_ops (stmt, NULL); |
28c92cbb | 1232 | } |
1233 | ||
1234 | if (uses) | |
1235 | { | |
1236 | for (last_use = uses; last_use->next; last_use = last_use->next) | |
1237 | delink_imm_use (USE_OP_PTR (last_use)); | |
1238 | delink_imm_use (USE_OP_PTR (last_use)); | |
1239 | last_use->next = gimple_ssa_operands (cfun)->free_uses; | |
1240 | gimple_ssa_operands (cfun)->free_uses = uses; | |
75a70cf9 | 1241 | gimple_set_use_ops (stmt, NULL); |
28c92cbb | 1242 | } |
1243 | ||
75a70cf9 | 1244 | if (gimple_has_mem_ops (stmt)) |
1245 | { | |
dd277d48 | 1246 | gimple_set_vuse (stmt, NULL_TREE); |
1247 | gimple_set_vdef (stmt, NULL_TREE); | |
75a70cf9 | 1248 | } |
c9a1e1e0 | 1249 | } |
1250 | ||
0b3f639d | 1251 | |
/* Get the operands of statement STMT.  Rebuilds the operand cache for
   a statement previously marked modified, clearing the modified flag.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);

  /* Only statements marked modified may have their cache rebuilt.  */
  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
b0b70f22 | 1276 | |
f6255040 | 1277 | |
fa999566 | 1278 | /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done |
1279 | to test the validity of the swap operation. */ | |
b0b70f22 | 1280 | |
fa999566 | 1281 | void |
75a70cf9 | 1282 | swap_tree_operands (gimple stmt, tree *exp0, tree *exp1) |
fa999566 | 1283 | { |
1284 | tree op0, op1; | |
1285 | op0 = *exp0; | |
1286 | op1 = *exp1; | |
0b3f639d | 1287 | |
f6255040 | 1288 | /* If the operand cache is active, attempt to preserve the relative |
1289 | positions of these two operands in their respective immediate use | |
f3f02af0 | 1290 | lists by adjusting their use pointer to point to the new |
1291 | operand position. */ | |
fa999566 | 1292 | if (ssa_operands_active () && op0 != op1) |
1293 | { | |
1294 | use_optype_p use0, use1, ptr; | |
1295 | use0 = use1 = NULL; | |
0b3f639d | 1296 | |
fa999566 | 1297 | /* Find the 2 operands in the cache, if they are there. */ |
75a70cf9 | 1298 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
fa999566 | 1299 | if (USE_OP_PTR (ptr)->use == exp0) |
1300 | { | |
1301 | use0 = ptr; | |
1302 | break; | |
1303 | } | |
0b3f639d | 1304 | |
75a70cf9 | 1305 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
fa999566 | 1306 | if (USE_OP_PTR (ptr)->use == exp1) |
1307 | { | |
1308 | use1 = ptr; | |
1309 | break; | |
1310 | } | |
1311 | ||
f3f02af0 | 1312 | /* And adjust their location to point to the new position of the |
1313 | operand. */ | |
1314 | if (use0) | |
1315 | USE_OP_PTR (use0)->use = exp1; | |
1316 | if (use1) | |
1317 | USE_OP_PTR (use1)->use = exp0; | |
0b3f639d | 1318 | } |
fa999566 | 1319 | |
1320 | /* Now swap the data. */ | |
1321 | *exp0 = op1; | |
1322 | *exp1 = op0; | |
0b3f639d | 1323 | } |
1324 | ||
75a70cf9 | 1325 | |
/* Scan the immediate_use list for VAR making sure its linked properly.
   Return TRUE if there is a problem and emit an error message to F.
   The list is a circular doubly-linked list rooted at VAR's
   SSA_NAME_IMM_USE_NODE; it is walked in both directions and the node
   counts must agree.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The root node never carries a use pointer.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked root must be unlinked in both directions.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward, checking back-links and that every use really
     refers to VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      /* The backward walk must see exactly as many nodes as the
	 forward walk did.  */
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf(f, "\n");
  return true;
}
1396 | ||
1397 | ||
1398 | /* Dump all the immediate uses to FILE. */ | |
1399 | ||
1400 | void | |
1401 | dump_immediate_uses_for (FILE *file, tree var) | |
1402 | { | |
1403 | imm_use_iterator iter; | |
1404 | use_operand_p use_p; | |
1405 | ||
1406 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
1407 | ||
1408 | print_generic_expr (file, var, TDF_SLIM); | |
1409 | fprintf (file, " : -->"); | |
1410 | if (has_zero_uses (var)) | |
1411 | fprintf (file, " no uses.\n"); | |
1412 | else | |
1413 | if (has_single_use (var)) | |
1414 | fprintf (file, " single use.\n"); | |
1415 | else | |
1416 | fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
1417 | ||
1418 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
1419 | { | |
75a70cf9 | 1420 | if (use_p->loc.stmt == NULL && use_p->use == NULL) |
66c8f3a9 | 1421 | fprintf (file, "***end of stmt iterator marker***\n"); |
b66731e8 | 1422 | else |
66c8f3a9 | 1423 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
75a70cf9 | 1424 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); |
66c8f3a9 | 1425 | else |
75a70cf9 | 1426 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); |
22aa74c4 | 1427 | } |
1428 | fprintf(file, "\n"); | |
1429 | } | |
1430 | ||
a002e999 | 1431 | |
22aa74c4 | 1432 | /* Dump all the immediate uses to FILE. */ |
1433 | ||
1434 | void | |
1435 | dump_immediate_uses (FILE *file) | |
1436 | { | |
1437 | tree var; | |
1438 | unsigned int x; | |
1439 | ||
1440 | fprintf (file, "Immediate_uses: \n\n"); | |
1441 | for (x = 1; x < num_ssa_names; x++) | |
1442 | { | |
1443 | var = ssa_name(x); | |
1444 | if (!var) | |
1445 | continue; | |
1446 | dump_immediate_uses_for (file, var); | |
1447 | } | |
1448 | } | |
1449 | ||
/* Dump def-use edges on stderr.  Convenience wrapper around
   dump_immediate_uses for use from the debugger.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}
1458 | ||
/* Dump def-use edges for VAR on stderr.  Convenience wrapper around
   dump_immediate_uses_for for use from the debugger.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
de6ed584 | 1467 | |
1468 | ||
dd277d48 | 1469 | /* Unlink STMTs virtual definition from the IL by propagating its use. */ |
1470 | ||
1471 | void | |
1472 | unlink_stmt_vdef (gimple stmt) | |
1473 | { | |
1474 | use_operand_p use_p; | |
1475 | imm_use_iterator iter; | |
1476 | gimple use_stmt; | |
1477 | tree vdef = gimple_vdef (stmt); | |
1478 | ||
1479 | if (!vdef | |
1480 | || TREE_CODE (vdef) != SSA_NAME) | |
1481 | return; | |
1482 | ||
1483 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt)) | |
1484 | { | |
1485 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
1486 | SET_USE (use_p, gimple_vuse (stmt)); | |
1487 | } | |
de6ed584 | 1488 | |
dd277d48 | 1489 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt))) |
1490 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1; | |
de6ed584 | 1491 | } |
dd277d48 | 1492 |