]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* SSA operands management for trees. |
711789cc | 2 | Copyright (C) 2003-2013 Free Software Foundation, Inc. |
4ee9c684 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 8 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
23 | #include "tm.h" | |
24 | #include "tree.h" | |
9ed99284 | 25 | #include "stmt.h" |
26 | #include "print-tree.h" | |
4ee9c684 | 27 | #include "flags.h" |
28 | #include "function.h" | |
ce084dfc | 29 | #include "gimple-pretty-print.h" |
073c1fd5 | 30 | #include "bitmap.h" |
31 | #include "gimple.h" | |
32 | #include "gimple-ssa.h" | |
33 | #include "tree-phinodes.h" | |
34 | #include "ssa-iterators.h" | |
9ed99284 | 35 | #include "stringpool.h" |
073c1fd5 | 36 | #include "tree-ssanames.h" |
4ee9c684 | 37 | #include "tree-inline.h" |
b9ed1410 | 38 | #include "timevar.h" |
39 | #include "dumpfile.h" | |
4ee9c684 | 40 | #include "ggc.h" |
41 | #include "timevar.h" | |
acc70efa | 42 | #include "langhooks.h" |
85f3d834 | 43 | #include "diagnostic-core.h" |
44 | ||
5b110d39 | 45 | |
48e1416a | 46 | /* This file contains the code required to manage the operands cache of the |
47 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt | |
48 | annotation. This cache contains operands that will be of interest to | |
49 | optimizers and other passes wishing to manipulate the IL. | |
5b110d39 | 50 | |
48e1416a | 51 | The operand type are broken up into REAL and VIRTUAL operands. The real |
52 | operands are represented as pointers into the stmt's operand tree. Thus | |
5b110d39 | 53 | any manipulation of the real operands will be reflected in the actual tree. |
48e1416a | 54 | Virtual operands are represented solely in the cache, although the base |
55 | variable for the SSA_NAME may, or may not occur in the stmt's tree. | |
5b110d39 | 56 | Manipulation of the virtual operands will not be reflected in the stmt tree. |
57 | ||
48e1416a | 58 | The routines in this file are concerned with creating this operand cache |
5b110d39 | 59 | from a stmt tree. |
60 | ||
48e1416a | 61 | The operand tree is then parsed by the various get_* routines which look
62 | through the stmt tree for the occurrence of operands which may be of | |
63 | interest, and calls are made to the append_* routines whenever one is | |
64 | found. There are 4 of these routines, each representing one of the | |
4fb5e5ca | 65 | 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. |
5b110d39 | 66 | |
48e1416a | 67 | The append_* routines check for duplication, and simply keep a list of |
5b110d39 | 68 | unique objects for each operand type in the build_* extendable vectors. |
69 | ||
48e1416a | 70 | Once the stmt tree is completely parsed, the finalize_ssa_operands() |
71 | routine is called, which proceeds to perform the finalization routine | |
4fb5e5ca | 72 | on each of the 4 operand vectors which have been built up. |
5b110d39 | 73 | |
48e1416a | 74 | If the stmt had a previous operand cache, the finalization routines |
75 | attempt to match up the new operands with the old ones. If it's a perfect | |
76 | match, the old vector is simply reused. If it isn't a perfect match, then | |
77 | a new vector is created and the new operands are placed there. For | |
78 | virtual operands, if the previous cache had SSA_NAME version of a | |
79 | variable, and that same variable occurs in the same operands cache, then | |
5b110d39 | 80 | the new cache vector will also get the same SSA_NAME. |
81 | ||
4ec25329 | 82 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new |
83 | operand vector for VUSE, then the new vector will also be modified | |
84 | such that it contains 'a_5' rather than 'a'. */ | |
5b110d39 | 85 | |
4fb5e5ca | 86 | |
/* Flags to describe operand properties in helpers.  These are OR'ed
   together and threaded through the get_*_operands routines.  */

/* By default, operands are loaded.  */
#define opf_use	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)
114 | ||
/* Array for building all the use operands.  Entries are really tree *
   pointers cast to tree (see append_use / finalize_ssa_uses).  */
static vec<tree> build_uses;

/* The built VDEF operand.  At most one per statement.  */
static tree build_vdef;

/* The built VUSE operand.  At most one per statement.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Forward declaration: the recursive operand scanner.  */
static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  Used to share the
   static build state above across functions.  */
static int n_initialized = 0;
5b110d39 | 132 | |
582791b0 | 133 | /* Accessor to tree-ssa-operands.c caches. */ |
134 | static inline struct ssa_operands * | |
135 | gimple_ssa_operands (const struct function *fun) | |
136 | { | |
137 | return &fun->gimple_df->ssa_operands; | |
138 | } | |
139 | ||
fa999566 | 140 | |
f6255040 | 141 | /* Return true if the SSA operands cache is active. */ |
5b110d39 | 142 | |
b66731e8 | 143 | bool |
8d672d12 | 144 | ssa_operands_active (struct function *fun) |
4ee9c684 | 145 | { |
8d672d12 | 146 | if (fun == NULL) |
75a70cf9 | 147 | return false; |
148 | ||
8d672d12 | 149 | return fun->gimple_df && gimple_ssa_operands (fun)->ops_active; |
b66731e8 | 150 | } |
4ee9c684 | 151 | |
48e1416a | 152 | |
dd277d48 | 153 | /* Create the VOP variable, an artificial global variable to act as a |
154 | representative of all of the virtual operands FUD chain. */ | |
fa999566 | 155 | |
dd277d48 | 156 | static void |
5084b2e4 | 157 | create_vop_var (struct function *fn) |
dadb7503 | 158 | { |
dd277d48 | 159 | tree global_var; |
160 | ||
5084b2e4 | 161 | gcc_assert (fn->gimple_df->vop == NULL_TREE); |
dd277d48 | 162 | |
e60a6f7b | 163 | global_var = build_decl (BUILTINS_LOCATION, VAR_DECL, |
164 | get_identifier (".MEM"), | |
dd277d48 | 165 | void_type_node); |
166 | DECL_ARTIFICIAL (global_var) = 1; | |
167 | TREE_READONLY (global_var) = 0; | |
168 | DECL_EXTERNAL (global_var) = 1; | |
169 | TREE_STATIC (global_var) = 1; | |
170 | TREE_USED (global_var) = 1; | |
171 | DECL_CONTEXT (global_var) = NULL_TREE; | |
172 | TREE_THIS_VOLATILE (global_var) = 0; | |
173 | TREE_ADDRESSABLE (global_var) = 0; | |
5084b2e4 | 174 | VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1; |
dd277d48 | 175 | |
5084b2e4 | 176 | fn->gimple_df->vop = global_var; |
dadb7503 | 177 | } |
dadb7503 | 178 | |
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
dadb7503 | 190 | |
b66731e8 | 191 | /* Initialize the operand cache routines. */ |
192 | ||
193 | void | |
5084b2e4 | 194 | init_ssa_operands (struct function *fn) |
b66731e8 | 195 | { |
fcbe34ba | 196 | if (!n_initialized++) |
197 | { | |
f1f41a6c | 198 | build_uses.create (10); |
dd277d48 | 199 | build_vuse = NULL_TREE; |
200 | build_vdef = NULL_TREE; | |
363d040e | 201 | bitmap_obstack_initialize (&operands_bitmap_obstack); |
fcbe34ba | 202 | } |
203 | ||
5084b2e4 | 204 | gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL); |
205 | gimple_ssa_operands (fn)->operand_memory_index | |
206 | = gimple_ssa_operands (fn)->ssa_operand_mem_size; | |
207 | gimple_ssa_operands (fn)->ops_active = true; | |
208 | gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT; | |
209 | create_vop_var (fn); | |
b66731e8 | 210 | } |
4ee9c684 | 211 | |
5b110d39 | 212 | |
b66731e8 | 213 | /* Dispose of anything required by the operand routines. */ |
214 | ||
215 | void | |
216 | fini_ssa_operands (void) | |
217 | { | |
218 | struct ssa_operand_memory_d *ptr; | |
4fb5e5ca | 219 | |
fcbe34ba | 220 | if (!--n_initialized) |
221 | { | |
f1f41a6c | 222 | build_uses.release (); |
dd277d48 | 223 | build_vdef = NULL_TREE; |
224 | build_vuse = NULL_TREE; | |
fcbe34ba | 225 | } |
4fb5e5ca | 226 | |
fcbe34ba | 227 | gimple_ssa_operands (cfun)->free_uses = NULL; |
4fb5e5ca | 228 | |
fcbe34ba | 229 | while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL) |
b66731e8 | 230 | { |
fcbe34ba | 231 | gimple_ssa_operands (cfun)->operand_memory |
232 | = gimple_ssa_operands (cfun)->operand_memory->next; | |
b66731e8 | 233 | ggc_free (ptr); |
5b110d39 | 234 | } |
235 | ||
fcbe34ba | 236 | gimple_ssa_operands (cfun)->ops_active = false; |
4fb5e5ca | 237 | |
363d040e | 238 | if (!n_initialized) |
239 | bitmap_obstack_release (&operands_bitmap_obstack); | |
75a70cf9 | 240 | |
dd277d48 | 241 | cfun->gimple_df->vop = NULL_TREE; |
b66731e8 | 242 | } |
5b110d39 | 243 | |
4ee9c684 | 244 | |
dd277d48 | 245 | /* Return memory for an operand of size SIZE. */ |
48e1416a | 246 | |
b66731e8 | 247 | static inline void * |
248 | ssa_operand_alloc (unsigned size) | |
249 | { | |
250 | char *ptr; | |
4fb5e5ca | 251 | |
5bb6976b | 252 | gcc_assert (size == sizeof (struct use_optype_d)); |
dd277d48 | 253 | |
fcbe34ba | 254 | if (gimple_ssa_operands (cfun)->operand_memory_index + size |
363d040e | 255 | >= gimple_ssa_operands (cfun)->ssa_operand_mem_size) |
b66731e8 | 256 | { |
257 | struct ssa_operand_memory_d *ptr; | |
dadb7503 | 258 | |
dd277d48 | 259 | switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size) |
260 | { | |
261 | case OP_SIZE_INIT: | |
262 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1; | |
263 | break; | |
264 | case OP_SIZE_1: | |
265 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2; | |
266 | break; | |
267 | case OP_SIZE_2: | |
268 | case OP_SIZE_3: | |
269 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3; | |
270 | break; | |
271 | default: | |
272 | gcc_unreachable (); | |
273 | } | |
dadb7503 | 274 | |
ba72912a | 275 | |
276 | ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *) | |
277 | + gimple_ssa_operands (cfun)->ssa_operand_mem_size); | |
278 | ||
fcbe34ba | 279 | ptr->next = gimple_ssa_operands (cfun)->operand_memory; |
280 | gimple_ssa_operands (cfun)->operand_memory = ptr; | |
281 | gimple_ssa_operands (cfun)->operand_memory_index = 0; | |
b66731e8 | 282 | } |
dd277d48 | 283 | |
fcbe34ba | 284 | ptr = &(gimple_ssa_operands (cfun)->operand_memory |
285 | ->mem[gimple_ssa_operands (cfun)->operand_memory_index]); | |
286 | gimple_ssa_operands (cfun)->operand_memory_index += size; | |
b66731e8 | 287 | return ptr; |
4ee9c684 | 288 | } |
289 | ||
5b110d39 | 290 | |
dadb7503 | 291 | /* Allocate a USE operand. */ |
292 | ||
4fb5e5ca | 293 | static inline struct use_optype_d * |
294 | alloc_use (void) | |
295 | { | |
296 | struct use_optype_d *ret; | |
297 | if (gimple_ssa_operands (cfun)->free_uses) | |
298 | { | |
299 | ret = gimple_ssa_operands (cfun)->free_uses; | |
300 | gimple_ssa_operands (cfun)->free_uses | |
301 | = gimple_ssa_operands (cfun)->free_uses->next; | |
302 | } | |
303 | else | |
dadb7503 | 304 | ret = (struct use_optype_d *) |
305 | ssa_operand_alloc (sizeof (struct use_optype_d)); | |
4fb5e5ca | 306 | return ret; |
307 | } | |
308 | ||
309 | ||
dadb7503 | 310 | /* Adds OP to the list of uses of statement STMT after LAST. */ |
b5b59dda | 311 | |
4fb5e5ca | 312 | static inline use_optype_p |
75a70cf9 | 313 | add_use_op (gimple stmt, tree *op, use_optype_p last) |
b5b59dda | 314 | { |
f0d6e81c | 315 | use_optype_p new_use; |
316 | ||
317 | new_use = alloc_use (); | |
318 | USE_OP_PTR (new_use)->use = op; | |
319 | link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); | |
320 | last->next = new_use; | |
321 | new_use->next = NULL; | |
322 | return new_use; | |
b5b59dda | 323 | } |
324 | ||
b5b59dda | 325 | |
b5b59dda | 326 | |
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Pre-pend the vdef we may have built.  Compare against the base
     variable of any existing SSA_NAME vdef so an unchanged vdef is
     left alone (preserving its SSA version).  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  /* Re-route immediate uses of the old vdef to the vuse and
	     release the now-dead SSA name.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}
b66731e8 | 364 | |
4ee9c684 | 365 | |
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      /* NOTE(review): build_vuse is known non-NULL in this branch, so
	 the conditional below is equivalent to oldvuse != build_vuse.  */
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      /* The vuse operand slot itself is recorded (as a tree * cast to
	 tree) so the loop below links it like any other use.  */
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Delink each old use from its immediate-use chain, then donate
	 the whole list to the free-uses pool for recycling.  */
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
5b110d39 | 428 | |
4fb5e5ca | 429 | |
430 | /* Clear the in_list bits and empty the build array for VDEFs and | |
431 | VUSEs. */ | |
b5b59dda | 432 | |
433 | static inline void | |
4fb5e5ca | 434 | cleanup_build_arrays (void) |
b5b59dda | 435 | { |
dd277d48 | 436 | build_vdef = NULL_TREE; |
437 | build_vuse = NULL_TREE; | |
f1f41a6c | 438 | build_uses.truncate (0); |
2cf24776 | 439 | } |
440 | ||
4ee9c684 | 441 | |
/* Finalize all the build vectors, fill the new ones into INFO.
   Defs are finalized before uses, then the shared build state is
   reset for the next statement.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}
451 | ||
452 | ||
5b110d39 | 453 | /* Start the process of building up operands vectors in INFO. */ |
454 | ||
455 | static inline void | |
456 | start_ssa_stmt_operands (void) | |
4ee9c684 | 457 | { |
f1f41a6c | 458 | gcc_assert (build_uses.length () == 0); |
dd277d48 | 459 | gcc_assert (build_vuse == NULL_TREE); |
460 | gcc_assert (build_vdef == NULL_TREE); | |
4ee9c684 | 461 | } |
462 | ||
463 | ||
5b110d39 | 464 | /* Add USE_P to the list of pointers to operands. */ |
4ee9c684 | 465 | |
466 | static inline void | |
5b110d39 | 467 | append_use (tree *use_p) |
4ee9c684 | 468 | { |
f1f41a6c | 469 | build_uses.safe_push ((tree) use_p); |
4ee9c684 | 470 | } |
471 | ||
472 | ||
4fb5e5ca | 473 | /* Add VAR to the set of variables that require a VDEF operator. */ |
4ee9c684 | 474 | |
5b110d39 | 475 | static inline void |
4fb5e5ca | 476 | append_vdef (tree var) |
4ee9c684 | 477 | { |
17fbf1b8 | 478 | if (!optimize) |
479 | return; | |
480 | ||
dd277d48 | 481 | gcc_assert ((build_vdef == NULL_TREE |
482 | || build_vdef == var) | |
483 | && (build_vuse == NULL_TREE | |
484 | || build_vuse == var)); | |
4fb5e5ca | 485 | |
dd277d48 | 486 | build_vdef = var; |
487 | build_vuse = var; | |
4ee9c684 | 488 | } |
489 | ||
490 | ||
4fb5e5ca | 491 | /* Add VAR to the set of variables that require a VUSE operator. */ |
4ee9c684 | 492 | |
5b110d39 | 493 | static inline void |
494 | append_vuse (tree var) | |
4ee9c684 | 495 | { |
17fbf1b8 | 496 | if (!optimize) |
497 | return; | |
498 | ||
dd277d48 | 499 | gcc_assert (build_vuse == NULL_TREE |
500 | || build_vuse == var); | |
4ee9c684 | 501 | |
dd277d48 | 502 | build_vuse = var; |
22aa74c4 | 503 | } |
504 | ||
dd277d48 | 505 | /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ |
f0e6e3c1 | 506 | |
dd277d48 | 507 | static void |
508 | add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags) | |
509 | { | |
510 | /* Add virtual operands to the stmt, unless the caller has specifically | |
511 | requested not to do that (used when adding operands inside an | |
512 | ADDR_EXPR expression). */ | |
513 | if (flags & opf_no_vops) | |
514 | return; | |
515 | ||
9845d120 | 516 | gcc_assert (!is_gimple_debug (stmt)); |
517 | ||
dd277d48 | 518 | if (flags & opf_def) |
519 | append_vdef (gimple_vop (cfun)); | |
520 | else | |
521 | append_vuse (gimple_vop (cfun)); | |
b66731e8 | 522 | } |
523 | ||
b66731e8 | 524 | |
75a70cf9 | 525 | /* Add *VAR_P to the appropriate operand array for statement STMT. |
526 | FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, | |
527 | it will be added to the statement's real operands, otherwise it is | |
528 | added to virtual operands. */ | |
fa999566 | 529 | |
530 | static void | |
75a70cf9 | 531 | add_stmt_operand (tree *var_p, gimple stmt, int flags) |
b66731e8 | 532 | { |
2f4ec87c | 533 | tree var = *var_p; |
b66731e8 | 534 | |
75a70cf9 | 535 | gcc_assert (SSA_VAR_P (*var_p)); |
b66731e8 | 536 | |
2f4ec87c | 537 | if (is_gimple_reg (var)) |
b66731e8 | 538 | { |
fa999566 | 539 | /* The variable is a GIMPLE register. Add it to real operands. */ |
4fb5e5ca | 540 | if (flags & opf_def) |
5bb6976b | 541 | ; |
fa999566 | 542 | else |
543 | append_use (var_p); | |
5bb6976b | 544 | if (DECL_P (*var_p)) |
545 | cfun->gimple_df->ssa_renaming_needed = 1; | |
b66731e8 | 546 | } |
fa999566 | 547 | else |
2f4ec87c | 548 | { |
549 | /* Mark statements with volatile operands. */ | |
550 | if (!(flags & opf_no_vops) | |
551 | && TREE_THIS_VOLATILE (var)) | |
552 | gimple_set_has_volatile_ops (stmt, true); | |
553 | ||
554 | /* The variable is a memory access. Add virtual operands. */ | |
555 | add_virtual_operand (stmt, flags); | |
556 | } | |
fa999566 | 557 | } |
b66731e8 | 558 | |
6d5ec6f8 | 559 | /* Mark the base address of REF as having its address taken. |
560 | REF may be a single variable whose address has been taken or any | |
561 | other valid GIMPLE memory reference (structure reference, array, | |
562 | etc). */ | |
b66731e8 | 563 | |
fa999566 | 564 | static void |
6d5ec6f8 | 565 | mark_address_taken (tree ref) |
4ec25329 | 566 | { |
dd277d48 | 567 | tree var; |
b66731e8 | 568 | |
dd277d48 | 569 | /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF |
570 | as the only thing we take the address of. If VAR is a structure, | |
571 | taking the address of a field means that the whole structure may | |
572 | be referenced using pointer arithmetic. See PR 21407 and the | |
573 | ensuing mailing list discussion. */ | |
574 | var = get_base_address (ref); | |
182cf5a9 | 575 | if (var) |
576 | { | |
577 | if (DECL_P (var)) | |
578 | TREE_ADDRESSABLE (var) = 1; | |
579 | else if (TREE_CODE (var) == MEM_REF | |
580 | && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR | |
581 | && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0))) | |
582 | TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1; | |
583 | } | |
22aa74c4 | 584 | } |
585 | ||
4ec25329 | 586 | |
/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  /* Volatile references taint the statement, unless virtual operands
     were explicitly suppressed.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  Only the
     no-vops and not-non-addressable bits of FLAGS are propagated.  */
  get_expr_operands (stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
a002e999 | 611 | |
4ec25329 | 612 | |
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands: base, index and second index are
     scanned as uses, propagating only the no-vops bit of FLAGS.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}
629 | ||
630 | ||
75a70cf9 | 631 | /* If STMT is a call that may clobber globals and other symbols that |
632 | escape, add them to the VDEF/VUSE lists for it. */ | |
fa999566 | 633 | |
634 | static void | |
dd277d48 | 635 | maybe_add_call_vops (gimple stmt) |
fa999566 | 636 | { |
75a70cf9 | 637 | int call_flags = gimple_call_flags (stmt); |
fa999566 | 638 | |
4fb5e5ca | 639 | /* If aliases have been computed already, add VDEF or VUSE |
fa999566 | 640 | operands for all the symbols that have been found to be |
4fb5e5ca | 641 | call-clobbered. */ |
dd277d48 | 642 | if (!(call_flags & ECF_NOVOPS)) |
fa999566 | 643 | { |
48e1416a | 644 | /* A 'pure' or a 'const' function never call-clobbers anything. |
645 | A 'noreturn' function might, but since we don't return anyway | |
646 | there is no point in recording that. */ | |
75a70cf9 | 647 | if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) |
dd277d48 | 648 | add_virtual_operand (stmt, opf_def); |
fa999566 | 649 | else if (!(call_flags & ECF_CONST)) |
dd277d48 | 650 | add_virtual_operand (stmt, opf_use); |
fa999566 | 651 | } |
fa999566 | 652 | } |
653 | ||
654 | ||
655 | /* Scan operands in the ASM_EXPR stmt referred to in INFO. */ | |
656 | ||
657 | static void | |
75a70cf9 | 658 | get_asm_expr_operands (gimple stmt) |
fa999566 | 659 | { |
75a70cf9 | 660 | size_t i, noutputs; |
4fb5e5ca | 661 | const char **oconstraints; |
fa999566 | 662 | const char *constraint; |
663 | bool allows_mem, allows_reg, is_inout; | |
4fb5e5ca | 664 | |
75a70cf9 | 665 | noutputs = gimple_asm_noutputs (stmt); |
4fb5e5ca | 666 | oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); |
fa999566 | 667 | |
4fb5e5ca | 668 | /* Gather all output operands. */ |
75a70cf9 | 669 | for (i = 0; i < gimple_asm_noutputs (stmt); i++) |
fa999566 | 670 | { |
75a70cf9 | 671 | tree link = gimple_asm_output_op (stmt, i); |
f6255040 | 672 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
673 | oconstraints[i] = constraint; | |
674 | parse_output_constraint (&constraint, i, 0, 0, &allows_mem, | |
675 | &allows_reg, &is_inout); | |
fa999566 | 676 | |
677 | /* This should have been split in gimplify_asm_expr. */ | |
678 | gcc_assert (!allows_reg || !is_inout); | |
679 | ||
680 | /* Memory operands are addressable. Note that STMT needs the | |
681 | address of this operand. */ | |
682 | if (!allows_reg && allows_mem) | |
7f2d9047 | 683 | mark_address_taken (TREE_VALUE (link)); |
fa999566 | 684 | |
182cf5a9 | 685 | get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable); |
fa999566 | 686 | } |
687 | ||
4fb5e5ca | 688 | /* Gather all input operands. */ |
75a70cf9 | 689 | for (i = 0; i < gimple_asm_ninputs (stmt); i++) |
fa999566 | 690 | { |
75a70cf9 | 691 | tree link = gimple_asm_input_op (stmt, i); |
fa999566 | 692 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
4fb5e5ca | 693 | parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, |
694 | &allows_mem, &allows_reg); | |
fa999566 | 695 | |
696 | /* Memory operands are addressable. Note that STMT needs the | |
697 | address of this operand. */ | |
698 | if (!allows_reg && allows_mem) | |
7f2d9047 | 699 | mark_address_taken (TREE_VALUE (link)); |
fa999566 | 700 | |
182cf5a9 | 701 | get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable); |
fa999566 | 702 | } |
703 | ||
4fb5e5ca | 704 | /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */ |
97cf41ec | 705 | if (gimple_asm_clobbers_memory_p (stmt)) |
706 | add_virtual_operand (stmt, opf_def); | |
f6255040 | 707 | } |
708 | ||
709 | ||
fa999566 | 710 | /* Recursively scan the expression pointed to by EXPR_P in statement |
f6255040 | 711 | STMT. FLAGS is one of the OPF_* constants modifying how to |
712 | interpret the operands found. */ | |
fa999566 | 713 | |
714 | static void | |
75a70cf9 | 715 | get_expr_operands (gimple stmt, tree *expr_p, int flags) |
fa999566 | 716 | { |
717 | enum tree_code code; | |
f0d6e81c | 718 | enum tree_code_class codeclass; |
fa999566 | 719 | tree expr = *expr_p; |
9845d120 | 720 | int uflags = opf_use; |
fa999566 | 721 | |
722 | if (expr == NULL) | |
723 | return; | |
724 | ||
9845d120 | 725 | if (is_gimple_debug (stmt)) |
726 | uflags |= (flags & opf_no_vops); | |
727 | ||
fa999566 | 728 | code = TREE_CODE (expr); |
f0d6e81c | 729 | codeclass = TREE_CODE_CLASS (code); |
fa999566 | 730 | |
731 | switch (code) | |
732 | { | |
733 | case ADDR_EXPR: | |
734 | /* Taking the address of a variable does not represent a | |
735 | reference to it, but the fact that the statement takes its | |
736 | address will be of interest to some passes (e.g. alias | |
737 | resolution). */ | |
182cf5a9 | 738 | if ((!(flags & opf_non_addressable) |
739 | || (flags & opf_not_non_addressable)) | |
740 | && !is_gimple_debug (stmt)) | |
9845d120 | 741 | mark_address_taken (TREE_OPERAND (expr, 0)); |
fa999566 | 742 | |
743 | /* If the address is invariant, there may be no interesting | |
744 | variable references inside. */ | |
745 | if (is_gimple_min_invariant (expr)) | |
746 | return; | |
747 | ||
748 | /* Otherwise, there may be variables referenced inside but there | |
749 | should be no VUSEs created, since the referenced objects are | |
750 | not really accessed. The only operands that we should find | |
751 | here are ARRAY_REF indices which will always be real operands | |
752 | (GIMPLE does not allow non-registers as array indices). */ | |
753 | flags |= opf_no_vops; | |
182cf5a9 | 754 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), |
755 | flags | opf_not_non_addressable); | |
fa999566 | 756 | return; |
757 | ||
758 | case SSA_NAME: | |
fa999566 | 759 | case VAR_DECL: |
760 | case PARM_DECL: | |
761 | case RESULT_DECL: | |
75a70cf9 | 762 | add_stmt_operand (expr_p, stmt, flags); |
2afb4be3 | 763 | return; |
fa999566 | 764 | |
688ff29b | 765 | case DEBUG_EXPR_DECL: |
766 | gcc_assert (gimple_debug_bind_p (stmt)); | |
767 | return; | |
768 | ||
182cf5a9 | 769 | case MEM_REF: |
5bb6976b | 770 | get_indirect_ref_operands (stmt, expr, flags); |
fa999566 | 771 | return; |
772 | ||
773 | case TARGET_MEM_REF: | |
774 | get_tmr_operands (stmt, expr, flags); | |
775 | return; | |
776 | ||
fa999566 | 777 | case ARRAY_REF: |
f6255040 | 778 | case ARRAY_RANGE_REF: |
fa999566 | 779 | case COMPONENT_REF: |
780 | case REALPART_EXPR: | |
781 | case IMAGPART_EXPR: | |
782 | { | |
587838bb | 783 | if (!(flags & opf_no_vops) |
784 | && TREE_THIS_VOLATILE (expr)) | |
75a70cf9 | 785 | gimple_set_has_volatile_ops (stmt, true); |
8e4c4d3b | 786 | |
4fb5e5ca | 787 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
48e1416a | 788 | |
2be14d8b | 789 | if (code == COMPONENT_REF) |
7fecfde9 | 790 | { |
587838bb | 791 | if (!(flags & opf_no_vops) |
792 | && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) | |
75a70cf9 | 793 | gimple_set_has_volatile_ops (stmt, true); |
9845d120 | 794 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); |
7fecfde9 | 795 | } |
f6255040 | 796 | else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) |
03c253f3 | 797 | { |
9845d120 | 798 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); |
799 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); | |
800 | get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags); | |
03c253f3 | 801 | } |
a002e999 | 802 | |
2be14d8b | 803 | return; |
804 | } | |
a002e999 | 805 | |
80f06481 | 806 | case WITH_SIZE_EXPR: |
454b4e1f | 807 | /* WITH_SIZE_EXPR is a pass-through reference to its first argument, |
80f06481 | 808 | and an rvalue reference to its second argument. */ |
9845d120 | 809 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); |
5b110d39 | 810 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
80f06481 | 811 | return; |
812 | ||
07c03fb0 | 813 | case COND_EXPR: |
bd2ec699 | 814 | case VEC_COND_EXPR: |
f4803722 | 815 | case VEC_PERM_EXPR: |
9845d120 | 816 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags); |
817 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); | |
818 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); | |
07c03fb0 | 819 | return; |
820 | ||
f9c6943b | 821 | case CONSTRUCTOR: |
822 | { | |
823 | /* General aggregate CONSTRUCTORs have been decomposed, but they | |
824 | are still in use as the COMPLEX_EXPR equivalent for vectors. */ | |
c75b4594 | 825 | constructor_elt *ce; |
826 | unsigned HOST_WIDE_INT idx; | |
f9c6943b | 827 | |
3c25489e | 828 | /* A volatile constructor is actually TREE_CLOBBER_P, transfer |
829 | the volatility to the statement, don't use TREE_CLOBBER_P for | |
830 | mirroring the other uses of THIS_VOLATILE in this file. */ | |
587838bb | 831 | if (!(flags & opf_no_vops) |
832 | && TREE_THIS_VOLATILE (expr)) | |
3c25489e | 833 | gimple_set_has_volatile_ops (stmt, true); |
834 | ||
c75b4594 | 835 | for (idx = 0; |
f1f41a6c | 836 | vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce); |
c75b4594 | 837 | idx++) |
9845d120 | 838 | get_expr_operands (stmt, &ce->value, uflags); |
f9c6943b | 839 | |
840 | return; | |
841 | } | |
842 | ||
c9a1e1e0 | 843 | case BIT_FIELD_REF: |
587838bb | 844 | if (!(flags & opf_no_vops) |
845 | && TREE_THIS_VOLATILE (expr)) | |
1e342984 | 846 | gimple_set_has_volatile_ops (stmt, true); |
847 | /* FALLTHRU */ | |
848 | ||
2c0bc8ce | 849 | case VIEW_CONVERT_EXPR: |
c9a1e1e0 | 850 | do_unary: |
5b110d39 | 851 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
4ee9c684 | 852 | return; |
4ee9c684 | 853 | |
c9a1e1e0 | 854 | case COMPOUND_EXPR: |
855 | case OBJ_TYPE_REF: | |
88dbf20f | 856 | case ASSERT_EXPR: |
c9a1e1e0 | 857 | do_binary: |
858 | { | |
5b110d39 | 859 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
860 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
c9a1e1e0 | 861 | return; |
862 | } | |
863 | ||
4a61a337 | 864 | case DOT_PROD_EXPR: |
b056d812 | 865 | case REALIGN_LOAD_EXPR: |
00f4f705 | 866 | case WIDEN_MULT_PLUS_EXPR: |
867 | case WIDEN_MULT_MINUS_EXPR: | |
156f51b9 | 868 | case FMA_EXPR: |
b056d812 | 869 | { |
870 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
00f4f705 | 871 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); |
872 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); | |
873 | return; | |
b056d812 | 874 | } |
875 | ||
c9a1e1e0 | 876 | case FUNCTION_DECL: |
c9a1e1e0 | 877 | case LABEL_DECL: |
bef99423 | 878 | case CONST_DECL: |
75a70cf9 | 879 | case CASE_LABEL_EXPR: |
fa999566 | 880 | /* Expressions that make no memory references. */ |
c9a1e1e0 | 881 | return; |
fa999566 | 882 | |
883 | default: | |
f0d6e81c | 884 | if (codeclass == tcc_unary) |
fa999566 | 885 | goto do_unary; |
f0d6e81c | 886 | if (codeclass == tcc_binary || codeclass == tcc_comparison) |
fa999566 | 887 | goto do_binary; |
f0d6e81c | 888 | if (codeclass == tcc_constant || codeclass == tcc_type) |
fa999566 | 889 | return; |
a002e999 | 890 | } |
c9a1e1e0 | 891 | |
fa999566 | 892 | /* If we get here, something has gone wrong. */ |
893 | #ifdef ENABLE_CHECKING | |
894 | fprintf (stderr, "unhandled expression in get_expr_operands():\n"); | |
895 | debug_tree (expr); | |
896 | fputs ("\n", stderr); | |
897 | #endif | |
898 | gcc_unreachable (); | |
c9a1e1e0 | 899 | } |
900 | ||
a002e999 | 901 | |
f6255040 | 902 | /* Parse STMT looking for operands. When finished, the various |
903 | build_* operand vectors will have potential operands in them. */ | |
904 | ||
aed164c3 | 905 | static void |
75a70cf9 | 906 | parse_ssa_operands (gimple stmt) |
aed164c3 | 907 | { |
75a70cf9 | 908 | enum gimple_code code = gimple_code (stmt); |
b65fbe25 | 909 | size_t i, n, start = 0; |
aed164c3 | 910 | |
b65fbe25 | 911 | switch (code) |
9845d120 | 912 | { |
b65fbe25 | 913 | case GIMPLE_ASM: |
914 | get_asm_expr_operands (stmt); | |
915 | break; | |
916 | ||
917 | case GIMPLE_TRANSACTION: | |
918 | /* The start of a transaction is a memory barrier. */ | |
919 | add_virtual_operand (stmt, opf_def | opf_use); | |
920 | break; | |
921 | ||
922 | case GIMPLE_DEBUG: | |
9845d120 | 923 | if (gimple_debug_bind_p (stmt) |
924 | && gimple_debug_bind_has_value_p (stmt)) | |
925 | get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt), | |
926 | opf_use | opf_no_vops); | |
b65fbe25 | 927 | break; |
fa999566 | 928 | |
b65fbe25 | 929 | case GIMPLE_RETURN: |
930 | append_vuse (gimple_vop (cfun)); | |
931 | goto do_default; | |
fa999566 | 932 | |
b65fbe25 | 933 | case GIMPLE_CALL: |
75a70cf9 | 934 | /* Add call-clobbered operands, if needed. */ |
b65fbe25 | 935 | maybe_add_call_vops (stmt); |
936 | /* FALLTHRU */ | |
2109076a | 937 | |
b65fbe25 | 938 | case GIMPLE_ASSIGN: |
939 | get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def); | |
940 | start = 1; | |
941 | /* FALLTHRU */ | |
942 | ||
943 | default: | |
944 | do_default: | |
945 | n = gimple_num_ops (stmt); | |
946 | for (i = start; i < n; i++) | |
947 | get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use); | |
948 | break; | |
ca9c9daf | 949 | } |
aed164c3 | 950 | } |
951 | ||
a002e999 | 952 | |
fa999566 | 953 | /* Create an operands cache for STMT. */ |
c9a1e1e0 | 954 | |
955 | static void | |
75a70cf9 | 956 | build_ssa_operands (gimple stmt) |
c9a1e1e0 | 957 | { |
6d5ec6f8 | 958 | /* Initially assume that the statement has no volatile operands. */ |
75a70cf9 | 959 | gimple_set_has_volatile_ops (stmt, false); |
75a70cf9 | 960 | |
fa999566 | 961 | start_ssa_stmt_operands (); |
fa999566 | 962 | parse_ssa_operands (stmt); |
fa999566 | 963 | finalize_ssa_stmt_operands (stmt); |
964 | } | |
39b644e9 | 965 | |
85f3d834 | 966 | /* Verifies SSA statement operands. */ |
967 | ||
DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  /* Remember the statement's current volatile flag; parsing below
     recomputes it and the two must agree.  */
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them: this fills the build_*
     vectors so they can be compared against the cached operands.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.
     Strip an SSA_NAME vdef down to its variable, since the build
     array records the underlying variable.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  /* When a vdef exists, the cached def operand must point at it.  */
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  /* Same checks for the virtual use.  */
  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  /* Match every cached use operand against the freshly built uses,
     crossing off each build entry as it is found.  NOTE: build_uses
     stores tree* pointers cast to tree, hence the casts below.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      /* Cached use not produced by a fresh parse: stale operand.  */
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  /* Any build entry not crossed off is a use missing from the
     cache.  */
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  /* NOTE(review): the early error returns above skip this cleanup,
     leaving the build arrays populated — presumably acceptable since
     a true return aborts verification; confirm against callers.  */
  cleanup_build_arrays ();
  return false;
}
1052 | ||
4ec25329 | 1053 | |
28c92cbb | 1054 | /* Releases the operands of STMT back to their freelists, and clears |
1055 | the stmt operand lists. */ | |
1056 | ||
1057 | void | |
75a70cf9 | 1058 | free_stmt_operands (gimple stmt) |
28c92cbb | 1059 | { |
75a70cf9 | 1060 | use_optype_p uses = gimple_use_ops (stmt), last_use; |
28c92cbb | 1061 | |
28c92cbb | 1062 | if (uses) |
1063 | { | |
1064 | for (last_use = uses; last_use->next; last_use = last_use->next) | |
1065 | delink_imm_use (USE_OP_PTR (last_use)); | |
1066 | delink_imm_use (USE_OP_PTR (last_use)); | |
1067 | last_use->next = gimple_ssa_operands (cfun)->free_uses; | |
1068 | gimple_ssa_operands (cfun)->free_uses = uses; | |
75a70cf9 | 1069 | gimple_set_use_ops (stmt, NULL); |
28c92cbb | 1070 | } |
1071 | ||
75a70cf9 | 1072 | if (gimple_has_mem_ops (stmt)) |
1073 | { | |
dd277d48 | 1074 | gimple_set_vuse (stmt, NULL_TREE); |
1075 | gimple_set_vdef (stmt, NULL_TREE); | |
75a70cf9 | 1076 | } |
c9a1e1e0 | 1077 | } |
1078 | ||
0b3f639d | 1079 | |
7dd75889 | 1080 | /* Get the operands of statement STMT. */ |
a002e999 | 1081 | |
void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (cfun))
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  /* Only statements previously flagged as modified may have their
     operand cache rebuilt; the flag is cleared once rebuilt.  */
  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
b0b70f22 | 1104 | |
f6255040 | 1105 | |
fa999566 | 1106 | /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done |
1107 | to test the validity of the swap operation. */ | |
b0b70f22 | 1108 | |
fa999566 | 1109 | void |
8f6fa493 | 1110 | swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1) |
fa999566 | 1111 | { |
1112 | tree op0, op1; | |
1113 | op0 = *exp0; | |
1114 | op1 = *exp1; | |
0b3f639d | 1115 | |
8f6fa493 | 1116 | gcc_checking_assert (ssa_operands_active (cfun)); |
1117 | ||
1118 | if (op0 != op1) | |
fa999566 | 1119 | { |
8f6fa493 | 1120 | /* Attempt to preserve the relative positions of these two operands in |
1121 | their * respective immediate use lists by adjusting their use pointer | |
1122 | to point to the new operand position. */ | |
fa999566 | 1123 | use_optype_p use0, use1, ptr; |
1124 | use0 = use1 = NULL; | |
0b3f639d | 1125 | |
fa999566 | 1126 | /* Find the 2 operands in the cache, if they are there. */ |
75a70cf9 | 1127 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
fa999566 | 1128 | if (USE_OP_PTR (ptr)->use == exp0) |
1129 | { | |
1130 | use0 = ptr; | |
1131 | break; | |
1132 | } | |
0b3f639d | 1133 | |
75a70cf9 | 1134 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
fa999566 | 1135 | if (USE_OP_PTR (ptr)->use == exp1) |
1136 | { | |
1137 | use1 = ptr; | |
1138 | break; | |
1139 | } | |
1140 | ||
f3f02af0 | 1141 | /* And adjust their location to point to the new position of the |
1142 | operand. */ | |
1143 | if (use0) | |
1144 | USE_OP_PTR (use0)->use = exp1; | |
1145 | if (use1) | |
1146 | USE_OP_PTR (use1)->use = exp0; | |
fa999566 | 1147 | |
8f6fa493 | 1148 | /* Now swap the data. */ |
1149 | *exp0 = op1; | |
1150 | *exp1 = op0; | |
1151 | } | |
0b3f639d | 1152 | } |
1153 | ||
75a70cf9 | 1154 | |
22aa74c4 | 1155 | /* Scan the immediate_use list for VAR making sure its linked properly. |
f6255040 | 1156 | Return TRUE if there is a problem and emit an error message to F. */ |
22aa74c4 | 1157 | |
DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The immediate-use list is a circular doubly-linked list rooted at
     a node embedded in the SSA_NAME; the root node itself carries no
     use.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked root must be unlinked in both directions.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward, checking that every node's prev pointer matches the
     node we came from and that the node really uses VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction; it must contain exactly the
     same number of nodes, so COUNT is decremented back to zero.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  /* Emit a diagnostic for the offending node; a modified statement
     often explains a stale link.  */
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
1225 | ||
1226 | ||
1227 | /* Dump all the immediate uses to FILE. */ | |
1228 | ||
1229 | void | |
1230 | dump_immediate_uses_for (FILE *file, tree var) | |
1231 | { | |
1232 | imm_use_iterator iter; | |
1233 | use_operand_p use_p; | |
1234 | ||
1235 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
1236 | ||
1237 | print_generic_expr (file, var, TDF_SLIM); | |
1238 | fprintf (file, " : -->"); | |
1239 | if (has_zero_uses (var)) | |
1240 | fprintf (file, " no uses.\n"); | |
1241 | else | |
1242 | if (has_single_use (var)) | |
1243 | fprintf (file, " single use.\n"); | |
1244 | else | |
1245 | fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
1246 | ||
1247 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
1248 | { | |
75a70cf9 | 1249 | if (use_p->loc.stmt == NULL && use_p->use == NULL) |
66c8f3a9 | 1250 | fprintf (file, "***end of stmt iterator marker***\n"); |
b66731e8 | 1251 | else |
66c8f3a9 | 1252 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
75a70cf9 | 1253 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); |
66c8f3a9 | 1254 | else |
75a70cf9 | 1255 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); |
22aa74c4 | 1256 | } |
9af5ce0c | 1257 | fprintf (file, "\n"); |
22aa74c4 | 1258 | } |
1259 | ||
a002e999 | 1260 | |
22aa74c4 | 1261 | /* Dump all the immediate uses to FILE. */ |
1262 | ||
1263 | void | |
1264 | dump_immediate_uses (FILE *file) | |
1265 | { | |
1266 | tree var; | |
1267 | unsigned int x; | |
1268 | ||
1269 | fprintf (file, "Immediate_uses: \n\n"); | |
1270 | for (x = 1; x < num_ssa_names; x++) | |
1271 | { | |
9af5ce0c | 1272 | var = ssa_name (x); |
22aa74c4 | 1273 | if (!var) |
1274 | continue; | |
1275 | dump_immediate_uses_for (file, var); | |
1276 | } | |
1277 | } | |
1278 | ||
1279 | ||
1280 | /* Dump def-use edges on stderr. */ | |
1281 | ||
4b987fac | 1282 | DEBUG_FUNCTION void |
22aa74c4 | 1283 | debug_immediate_uses (void) |
1284 | { | |
1285 | dump_immediate_uses (stderr); | |
1286 | } | |
1287 | ||
f6255040 | 1288 | |
22aa74c4 | 1289 | /* Dump def-use edges on stderr. */ |
1290 | ||
4b987fac | 1291 | DEBUG_FUNCTION void |
22aa74c4 | 1292 | debug_immediate_uses_for (tree var) |
1293 | { | |
1294 | dump_immediate_uses_for (stderr, var); | |
5b110d39 | 1295 | } |
de6ed584 | 1296 | |
1297 | ||
dd277d48 | 1298 | /* Unlink STMTs virtual definition from the IL by propagating its use. */ |
1299 | ||
void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* Nothing to do unless STMT defines a virtual SSA name.  */
  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  /* Replace every use of STMT's VDEF with its incoming VUSE, which
     removes STMT from the virtual use-def chain.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  /* Propagate the abnormal-PHI marker so the replacement name stays
     valid wherever the old one appeared in an abnormal PHI.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
dd277d48 | 1322 | |
8f6fa493 | 1323 | |
1324 | /* Return true if the var whose chain of uses starts at PTR has no | |
1325 | nondebug uses. */ | |
1326 | bool | |
1327 | has_zero_uses_1 (const ssa_use_operand_t *head) | |
1328 | { | |
1329 | const ssa_use_operand_t *ptr; | |
1330 | ||
1331 | for (ptr = head->next; ptr != head; ptr = ptr->next) | |
1332 | if (!is_gimple_debug (USE_STMT (ptr))) | |
1333 | return false; | |
1334 | ||
1335 | return true; | |
1336 | } | |
1337 | ||
1338 | ||
1339 | /* Return true if the var whose chain of uses starts at PTR has a | |
1340 | single nondebug use. Set USE_P and STMT to that single nondebug | |
1341 | use, if so, or to NULL otherwise. */ | |
1342 | bool | |
1343 | single_imm_use_1 (const ssa_use_operand_t *head, | |
1344 | use_operand_p *use_p, gimple *stmt) | |
1345 | { | |
1346 | ssa_use_operand_t *ptr, *single_use = 0; | |
1347 | ||
1348 | for (ptr = head->next; ptr != head; ptr = ptr->next) | |
1349 | if (!is_gimple_debug (USE_STMT (ptr))) | |
1350 | { | |
1351 | if (single_use) | |
1352 | { | |
1353 | single_use = NULL; | |
1354 | break; | |
1355 | } | |
1356 | single_use = ptr; | |
1357 | } | |
1358 | ||
1359 | if (use_p) | |
1360 | *use_p = single_use; | |
1361 | ||
1362 | if (stmt) | |
1363 | *stmt = single_use ? single_use->loc.stmt : NULL; | |
1364 | ||
1365 | return single_use; | |
1366 | } |