]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* SSA operands management for trees. |
711789cc | 2 | Copyright (C) 2003-2013 Free Software Foundation, Inc. |
4ee9c684 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 8 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
23 | #include "tm.h" | |
24 | #include "tree.h" | |
25 | #include "flags.h" | |
26 | #include "function.h" | |
ce084dfc | 27 | #include "gimple-pretty-print.h" |
073c1fd5 | 28 | #include "bitmap.h" |
29 | #include "gimple.h" | |
30 | #include "gimple-ssa.h" | |
31 | #include "tree-phinodes.h" | |
32 | #include "ssa-iterators.h" | |
33 | #include "tree-ssanames.h" | |
4ee9c684 | 34 | #include "tree-inline.h" |
b9ed1410 | 35 | #include "timevar.h" |
36 | #include "dumpfile.h" | |
4ee9c684 | 37 | #include "ggc.h" |
38 | #include "timevar.h" | |
acc70efa | 39 | #include "langhooks.h" |
85f3d834 | 40 | #include "diagnostic-core.h" |
41 | ||
5b110d39 | 42 | |
48e1416a | 43 | /* This file contains the code required to manage the operands cache of the |
44 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt | |
45 | annotation. This cache contains operands that will be of interest to | |
46 | optimizers and other passes wishing to manipulate the IL. | |
5b110d39 | 47 | |
48e1416a | 48 | The operand type are broken up into REAL and VIRTUAL operands. The real |
49 | operands are represented as pointers into the stmt's operand tree. Thus | |
5b110d39 | 50 | any manipulation of the real operands will be reflected in the actual tree. |
48e1416a | 51 | Virtual operands are represented solely in the cache, although the base |
52 | variable for the SSA_NAME may, or may not occur in the stmt's tree. | |
5b110d39 | 53 | Manipulation of the virtual operands will not be reflected in the stmt tree. |
54 | ||
48e1416a | 55 | The routines in this file are concerned with creating this operand cache |
5b110d39 | 56 | from a stmt tree. |
57 | ||
48e1416a | 58 | The operand tree is then parsed by the various get_* routines which look |
59 | through the stmt tree for the occurrence of operands which may be of | |
60 | interest, and calls are made to the append_* routines whenever one is | |
61 | found. There are 4 of these routines, each representing one of the | |
4fb5e5ca | 62 | 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. |
5b110d39 | 63 | |
48e1416a | 64 | The append_* routines check for duplication, and simply keep a list of |
5b110d39 | 65 | unique objects for each operand type in the build_* extendable vectors. |
66 | ||
48e1416a | 67 | Once the stmt tree is completely parsed, the finalize_ssa_operands() |
68 | routine is called, which proceeds to perform the finalization routine | |
4fb5e5ca | 69 | on each of the 4 operand vectors which have been built up. |
5b110d39 | 70 | |
48e1416a | 71 | If the stmt had a previous operand cache, the finalization routines |
72 | attempt to match up the new operands with the old ones. If it's a perfect | |
73 | match, the old vector is simply reused. If it isn't a perfect match, then | |
74 | a new vector is created and the new operands are placed there. For | |
75 | virtual operands, if the previous cache had SSA_NAME version of a | |
76 | variable, and that same variable occurs in the same operands cache, then | |
5b110d39 | 77 | the new cache vector will also get the same SSA_NAME. |
78 | ||
4ec25329 | 79 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new |
80 | operand vector for VUSE, then the new vector will also be modified | |
81 | such that it contains 'a_5' rather than 'a'. */ | |
5b110d39 | 82 | |
4fb5e5ca | 83 | |
59b2314d | 84 | /* Flags to describe operand properties in helpers. */ |
4ee9c684 | 85 | |
86 | /* By default, operands are loaded. */ | |
4fb5e5ca | 87 | #define opf_use 0 |
4ee9c684 | 88 | |
48e1416a | 89 | /* Operand is the target of an assignment expression or a |
f6255040 | 90 | call-clobbered variable. */ |
4fb5e5ca | 91 | #define opf_def (1 << 0) |
2cf24776 | 92 | |
4ee9c684 | 93 | /* No virtual operands should be created in the expression. This is used |
94 | when traversing ADDR_EXPR nodes which have different semantics than | |
95 | other expressions. Inside an ADDR_EXPR node, the only operands that we | |
96 | need to consider are indices into arrays. For instance, &a.b[i] should | |
97 | generate a USE of 'i' but it should not generate a VUSE for 'a' nor a | |
98 | VUSE for 'b'. */ | |
4fb5e5ca | 99 | #define opf_no_vops (1 << 1) |
4ee9c684 | 100 | |
4fb5e5ca | 101 | /* Operand is an implicit reference. This is used to distinguish |
75a70cf9 | 102 | explicit assignments in the form of MODIFY_EXPR from |
4fb5e5ca | 103 | clobbering sites like function calls or ASM_EXPRs. */ |
104 | #define opf_implicit (1 << 2) | |
868a0f34 | 105 | |
182cf5a9 | 106 | /* Operand is in a place where address-taken does not imply addressable. */ |
107 | #define opf_non_addressable (1 << 3) | |
108 | ||
109 | /* Operand is in a place where opf_non_addressable does not apply. */ | |
110 | #define opf_not_non_addressable (1 << 4) | |
111 | ||
4ee9c684 | 112 | /* Array for building all the use operands. */ |
f1f41a6c | 113 | static vec<tree> build_uses; |
4ee9c684 | 114 | |
dd277d48 | 115 | /* The built VDEF operand. */ |
116 | static tree build_vdef; | |
4ee9c684 | 117 | |
dd277d48 | 118 | /* The built VUSE operand. */ |
119 | static tree build_vuse; | |
4ee9c684 | 120 | |
48e1416a | 121 | /* Bitmap obstack for our datastructures that needs to survive across |
a7614546 | 122 | compilations of multiple functions. */ |
363d040e | 123 | static bitmap_obstack operands_bitmap_obstack; |
085b7aab | 124 | |
75a70cf9 | 125 | static void get_expr_operands (gimple, tree *, int); |
fa999566 | 126 | |
fcbe34ba | 127 | /* Number of functions with initialized ssa_operands. */ |
128 | static int n_initialized = 0; | |
5b110d39 | 129 | |
582791b0 | 130 | /* Accessor to tree-ssa-operands.c caches. */ |
131 | static inline struct ssa_operands * | |
132 | gimple_ssa_operands (const struct function *fun) | |
133 | { | |
134 | return &fun->gimple_df->ssa_operands; | |
135 | } | |
136 | ||
fa999566 | 137 | |
f6255040 | 138 | /* Return true if the SSA operands cache is active. */ |
5b110d39 | 139 | |
b66731e8 | 140 | bool |
8d672d12 | 141 | ssa_operands_active (struct function *fun) |
4ee9c684 | 142 | { |
8d672d12 | 143 | if (fun == NULL) |
75a70cf9 | 144 | return false; |
145 | ||
8d672d12 | 146 | return fun->gimple_df && gimple_ssa_operands (fun)->ops_active; |
b66731e8 | 147 | } |
4ee9c684 | 148 | |
48e1416a | 149 | |
dd277d48 | 150 | /* Create the VOP variable, an artificial global variable to act as a |
151 | representative of all of the virtual operands FUD chain. */ | |
fa999566 | 152 | |
dd277d48 | 153 | static void |
5084b2e4 | 154 | create_vop_var (struct function *fn) |
dadb7503 | 155 | { |
dd277d48 | 156 | tree global_var; |
157 | ||
5084b2e4 | 158 | gcc_assert (fn->gimple_df->vop == NULL_TREE); |
dd277d48 | 159 | |
e60a6f7b | 160 | global_var = build_decl (BUILTINS_LOCATION, VAR_DECL, |
161 | get_identifier (".MEM"), | |
dd277d48 | 162 | void_type_node); |
163 | DECL_ARTIFICIAL (global_var) = 1; | |
164 | TREE_READONLY (global_var) = 0; | |
165 | DECL_EXTERNAL (global_var) = 1; | |
166 | TREE_STATIC (global_var) = 1; | |
167 | TREE_USED (global_var) = 1; | |
168 | DECL_CONTEXT (global_var) = NULL_TREE; | |
169 | TREE_THIS_VOLATILE (global_var) = 0; | |
170 | TREE_ADDRESSABLE (global_var) = 0; | |
5084b2e4 | 171 | VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1; |
dd277d48 | 172 | |
5084b2e4 | 173 | fn->gimple_df->vop = global_var; |
dadb7503 | 174 | } |
dadb7503 | 175 | |
dd277d48 | 176 | /* These are the sizes of the operand memory buffer in bytes which gets |
177 | allocated each time more operands space is required. The final value is | |
178 | the amount that is allocated every time after that. | |
179 | In 1k we can fit 25 use operands (or 63 def operands) on a host with | |
180 | 8 byte pointers, that would be 10 statements each with 1 def and 2 | |
181 | uses. */ | |
48e1416a | 182 | |
dadb7503 | 183 | #define OP_SIZE_INIT 0 |
dd277d48 | 184 | #define OP_SIZE_1 (1024 - sizeof (void *)) |
185 | #define OP_SIZE_2 (1024 * 4 - sizeof (void *)) | |
186 | #define OP_SIZE_3 (1024 * 16 - sizeof (void *)) | |
dadb7503 | 187 | |
/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  /* The build_* scratch state is shared across all functions; set it
     up only for the first function that initializes its cache.  */
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  /* Setting the index equal to the chunk size forces the very first
     ssa_operand_alloc call to grab a fresh memory chunk.  */
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
4ee9c684 | 208 | |
5b110d39 | 209 | |
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  /* Tear down the shared build_* scratch state only when the last
     initialized function is being finalized.  */
  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  /* The free list points into the chunks released below, so drop it
     first.  */
  gimple_ssa_operands (cfun)->free_uses = NULL;

  /* Return every operand-memory chunk of CFUN to the GC allocator.  */
  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}
5b110d39 | 240 | |
4ee9c684 | 241 | |
dd277d48 | 242 | /* Return memory for an operand of size SIZE. */ |
48e1416a | 243 | |
b66731e8 | 244 | static inline void * |
245 | ssa_operand_alloc (unsigned size) | |
246 | { | |
247 | char *ptr; | |
4fb5e5ca | 248 | |
5bb6976b | 249 | gcc_assert (size == sizeof (struct use_optype_d)); |
dd277d48 | 250 | |
fcbe34ba | 251 | if (gimple_ssa_operands (cfun)->operand_memory_index + size |
363d040e | 252 | >= gimple_ssa_operands (cfun)->ssa_operand_mem_size) |
b66731e8 | 253 | { |
254 | struct ssa_operand_memory_d *ptr; | |
dadb7503 | 255 | |
dd277d48 | 256 | switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size) |
257 | { | |
258 | case OP_SIZE_INIT: | |
259 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1; | |
260 | break; | |
261 | case OP_SIZE_1: | |
262 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2; | |
263 | break; | |
264 | case OP_SIZE_2: | |
265 | case OP_SIZE_3: | |
266 | gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3; | |
267 | break; | |
268 | default: | |
269 | gcc_unreachable (); | |
270 | } | |
dadb7503 | 271 | |
ba72912a | 272 | |
273 | ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *) | |
274 | + gimple_ssa_operands (cfun)->ssa_operand_mem_size); | |
275 | ||
fcbe34ba | 276 | ptr->next = gimple_ssa_operands (cfun)->operand_memory; |
277 | gimple_ssa_operands (cfun)->operand_memory = ptr; | |
278 | gimple_ssa_operands (cfun)->operand_memory_index = 0; | |
b66731e8 | 279 | } |
dd277d48 | 280 | |
fcbe34ba | 281 | ptr = &(gimple_ssa_operands (cfun)->operand_memory |
282 | ->mem[gimple_ssa_operands (cfun)->operand_memory_index]); | |
283 | gimple_ssa_operands (cfun)->operand_memory_index += size; | |
b66731e8 | 284 | return ptr; |
4ee9c684 | 285 | } |
286 | ||
5b110d39 | 287 | |
dadb7503 | 288 | /* Allocate a USE operand. */ |
289 | ||
4fb5e5ca | 290 | static inline struct use_optype_d * |
291 | alloc_use (void) | |
292 | { | |
293 | struct use_optype_d *ret; | |
294 | if (gimple_ssa_operands (cfun)->free_uses) | |
295 | { | |
296 | ret = gimple_ssa_operands (cfun)->free_uses; | |
297 | gimple_ssa_operands (cfun)->free_uses | |
298 | = gimple_ssa_operands (cfun)->free_uses->next; | |
299 | } | |
300 | else | |
dadb7503 | 301 | ret = (struct use_optype_d *) |
302 | ssa_operand_alloc (sizeof (struct use_optype_d)); | |
4fb5e5ca | 303 | return ret; |
304 | } | |
305 | ||
306 | ||
dadb7503 | 307 | /* Adds OP to the list of uses of statement STMT after LAST. */ |
b5b59dda | 308 | |
4fb5e5ca | 309 | static inline use_optype_p |
75a70cf9 | 310 | add_use_op (gimple stmt, tree *op, use_optype_p last) |
b5b59dda | 311 | { |
f0d6e81c | 312 | use_optype_p new_use; |
313 | ||
314 | new_use = alloc_use (); | |
315 | USE_OP_PTR (new_use)->use = op; | |
316 | link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); | |
317 | last->next = new_use; | |
318 | new_use->next = NULL; | |
319 | return new_use; | |
b5b59dda | 320 | } |
321 | ||
b5b59dda | 322 | |
b5b59dda | 323 | |
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the base variable of an existing SSA name so
	 an already-renamed VDEF is not needlessly replaced.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      /* An SSA name VDEF must have its uses re-linked to the VUSE
	 and the name released before it is dropped.  */
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}
b66731e8 | 361 | |
4ee9c684 | 362 | |
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      /* Compare against the base variable of an SSA name VUSE so an
	 already-renamed operand is recognized.  */
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      /* build_uses stores tree* pointers cast to tree; insert the
	 address of the stmt's VUSE slot at the front.  */
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Delink each old use from its immediate-use chain, then move
	 the whole old list onto the free list for reuse.  */
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
5b110d39 | 425 | |
4fb5e5ca | 426 | |
427 | /* Clear the in_list bits and empty the build array for VDEFs and | |
428 | VUSEs. */ | |
b5b59dda | 429 | |
430 | static inline void | |
4fb5e5ca | 431 | cleanup_build_arrays (void) |
b5b59dda | 432 | { |
dd277d48 | 433 | build_vdef = NULL_TREE; |
434 | build_vuse = NULL_TREE; | |
f1f41a6c | 435 | build_uses.truncate (0); |
2cf24776 | 436 | } |
437 | ||
4ee9c684 | 438 | |
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  /* Defs must be finalized before uses so the VDEF is settled before
     the VUSE bookkeeping runs; the build arrays are reset last.  */
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}
448 | ||
449 | ||
5b110d39 | 450 | /* Start the process of building up operands vectors in INFO. */ |
451 | ||
452 | static inline void | |
453 | start_ssa_stmt_operands (void) | |
4ee9c684 | 454 | { |
f1f41a6c | 455 | gcc_assert (build_uses.length () == 0); |
dd277d48 | 456 | gcc_assert (build_vuse == NULL_TREE); |
457 | gcc_assert (build_vdef == NULL_TREE); | |
4ee9c684 | 458 | } |
459 | ||
460 | ||
5b110d39 | 461 | /* Add USE_P to the list of pointers to operands. */ |
4ee9c684 | 462 | |
463 | static inline void | |
5b110d39 | 464 | append_use (tree *use_p) |
4ee9c684 | 465 | { |
f1f41a6c | 466 | build_uses.safe_push ((tree) use_p); |
4ee9c684 | 467 | } |
468 | ||
469 | ||
4fb5e5ca | 470 | /* Add VAR to the set of variables that require a VDEF operator. */ |
4ee9c684 | 471 | |
5b110d39 | 472 | static inline void |
4fb5e5ca | 473 | append_vdef (tree var) |
4ee9c684 | 474 | { |
17fbf1b8 | 475 | if (!optimize) |
476 | return; | |
477 | ||
dd277d48 | 478 | gcc_assert ((build_vdef == NULL_TREE |
479 | || build_vdef == var) | |
480 | && (build_vuse == NULL_TREE | |
481 | || build_vuse == var)); | |
4fb5e5ca | 482 | |
dd277d48 | 483 | build_vdef = var; |
484 | build_vuse = var; | |
4ee9c684 | 485 | } |
486 | ||
487 | ||
4fb5e5ca | 488 | /* Add VAR to the set of variables that require a VUSE operator. */ |
4ee9c684 | 489 | |
5b110d39 | 490 | static inline void |
491 | append_vuse (tree var) | |
4ee9c684 | 492 | { |
17fbf1b8 | 493 | if (!optimize) |
494 | return; | |
495 | ||
dd277d48 | 496 | gcc_assert (build_vuse == NULL_TREE |
497 | || build_vuse == var); | |
4ee9c684 | 498 | |
dd277d48 | 499 | build_vuse = var; |
22aa74c4 | 500 | } |
501 | ||
dd277d48 | 502 | /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ |
f0e6e3c1 | 503 | |
dd277d48 | 504 | static void |
505 | add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags) | |
506 | { | |
507 | /* Add virtual operands to the stmt, unless the caller has specifically | |
508 | requested not to do that (used when adding operands inside an | |
509 | ADDR_EXPR expression). */ | |
510 | if (flags & opf_no_vops) | |
511 | return; | |
512 | ||
9845d120 | 513 | gcc_assert (!is_gimple_debug (stmt)); |
514 | ||
dd277d48 | 515 | if (flags & opf_def) |
516 | append_vdef (gimple_vop (cfun)); | |
517 | else | |
518 | append_vuse (gimple_vop (cfun)); | |
b66731e8 | 519 | } |
520 | ||
b66731e8 | 521 | |
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	/* Defs are not cached in the operand lists; the def is stored
	   directly in the statement, so there is nothing to do here.  */
	;
      else
	append_use (var_p);
      /* A bare decl (not yet an SSA_NAME) needs the renamer run.  */
      if (DECL_P (*var_p))
	cfun->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (stmt, flags);
    }
}
b66731e8 | 555 | |
6d5ec6f8 | 556 | /* Mark the base address of REF as having its address taken. |
557 | REF may be a single variable whose address has been taken or any | |
558 | other valid GIMPLE memory reference (structure reference, array, | |
559 | etc). */ | |
b66731e8 | 560 | |
fa999566 | 561 | static void |
6d5ec6f8 | 562 | mark_address_taken (tree ref) |
4ec25329 | 563 | { |
dd277d48 | 564 | tree var; |
b66731e8 | 565 | |
dd277d48 | 566 | /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF |
567 | as the only thing we take the address of. If VAR is a structure, | |
568 | taking the address of a field means that the whole structure may | |
569 | be referenced using pointer arithmetic. See PR 21407 and the | |
570 | ensuing mailing list discussion. */ | |
571 | var = get_base_address (ref); | |
182cf5a9 | 572 | if (var) |
573 | { | |
574 | if (DECL_P (var)) | |
575 | TREE_ADDRESSABLE (var) = 1; | |
576 | else if (TREE_CODE (var) == MEM_REF | |
577 | && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR | |
578 | && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0))) | |
579 | TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1; | |
580 | } | |
22aa74c4 | 581 | } |
582 | ||
4ec25329 | 583 | |
/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  /* A volatile reference makes the whole statement volatile, unless
     virtual operands were explicitly suppressed.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  Taking the
     address of the pointed-to object here does not make it
     addressable, so opf_non_addressable is added; only the no-vops
     and not-non-addressable bits of the caller's flags survive.  */
  get_expr_operands (stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
a002e999 | 608 | |
4ec25329 | 609 | |
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  /* A volatile reference makes the whole statement volatile, unless
     virtual operands were explicitly suppressed.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  Only the no-vops bit of the
     caller's flags is propagated; base and indices are plain uses.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  /* The memory access itself gets a virtual operand.  */
  add_virtual_operand (stmt, flags);
}
626 | ||
627 | ||
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      /* Pure (and noreturn) functions may still read memory, so they
	 get a VUSE; only truly const functions get no virtual
	 operands at all.  */
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
650 | ||
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  /* Output constraints are collected because input-constraint parsing
     (below) needs to resolve matching-constraint digits against them.  */
  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}
705 | ||
706 | ||
fa999566 | 707 | /* Recursively scan the expression pointed to by EXPR_P in statement |
f6255040 | 708 | STMT. FLAGS is one of the OPF_* constants modifying how to |
709 | interpret the operands found. */ | |
fa999566 | 710 | |
711 | static void | |
75a70cf9 | 712 | get_expr_operands (gimple stmt, tree *expr_p, int flags) |
fa999566 | 713 | { |
714 | enum tree_code code; | |
f0d6e81c | 715 | enum tree_code_class codeclass; |
fa999566 | 716 | tree expr = *expr_p; |
9845d120 | 717 | int uflags = opf_use; |
fa999566 | 718 | |
719 | if (expr == NULL) | |
720 | return; | |
721 | ||
9845d120 | 722 | if (is_gimple_debug (stmt)) |
723 | uflags |= (flags & opf_no_vops); | |
724 | ||
fa999566 | 725 | code = TREE_CODE (expr); |
f0d6e81c | 726 | codeclass = TREE_CODE_CLASS (code); |
fa999566 | 727 | |
728 | switch (code) | |
729 | { | |
730 | case ADDR_EXPR: | |
731 | /* Taking the address of a variable does not represent a | |
732 | reference to it, but the fact that the statement takes its | |
733 | address will be of interest to some passes (e.g. alias | |
734 | resolution). */ | |
182cf5a9 | 735 | if ((!(flags & opf_non_addressable) |
736 | || (flags & opf_not_non_addressable)) | |
737 | && !is_gimple_debug (stmt)) | |
9845d120 | 738 | mark_address_taken (TREE_OPERAND (expr, 0)); |
fa999566 | 739 | |
740 | /* If the address is invariant, there may be no interesting | |
741 | variable references inside. */ | |
742 | if (is_gimple_min_invariant (expr)) | |
743 | return; | |
744 | ||
745 | /* Otherwise, there may be variables referenced inside but there | |
746 | should be no VUSEs created, since the referenced objects are | |
747 | not really accessed. The only operands that we should find | |
748 | here are ARRAY_REF indices which will always be real operands | |
749 | (GIMPLE does not allow non-registers as array indices). */ | |
750 | flags |= opf_no_vops; | |
182cf5a9 | 751 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), |
752 | flags | opf_not_non_addressable); | |
fa999566 | 753 | return; |
754 | ||
755 | case SSA_NAME: | |
fa999566 | 756 | case VAR_DECL: |
757 | case PARM_DECL: | |
758 | case RESULT_DECL: | |
75a70cf9 | 759 | add_stmt_operand (expr_p, stmt, flags); |
2afb4be3 | 760 | return; |
fa999566 | 761 | |
688ff29b | 762 | case DEBUG_EXPR_DECL: |
763 | gcc_assert (gimple_debug_bind_p (stmt)); | |
764 | return; | |
765 | ||
182cf5a9 | 766 | case MEM_REF: |
5bb6976b | 767 | get_indirect_ref_operands (stmt, expr, flags); |
fa999566 | 768 | return; |
769 | ||
770 | case TARGET_MEM_REF: | |
771 | get_tmr_operands (stmt, expr, flags); | |
772 | return; | |
773 | ||
fa999566 | 774 | case ARRAY_REF: |
f6255040 | 775 | case ARRAY_RANGE_REF: |
fa999566 | 776 | case COMPONENT_REF: |
777 | case REALPART_EXPR: | |
778 | case IMAGPART_EXPR: | |
779 | { | |
587838bb | 780 | if (!(flags & opf_no_vops) |
781 | && TREE_THIS_VOLATILE (expr)) | |
75a70cf9 | 782 | gimple_set_has_volatile_ops (stmt, true); |
8e4c4d3b | 783 | |
4fb5e5ca | 784 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
48e1416a | 785 | |
2be14d8b | 786 | if (code == COMPONENT_REF) |
7fecfde9 | 787 | { |
587838bb | 788 | if (!(flags & opf_no_vops) |
789 | && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) | |
75a70cf9 | 790 | gimple_set_has_volatile_ops (stmt, true); |
9845d120 | 791 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); |
7fecfde9 | 792 | } |
f6255040 | 793 | else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) |
03c253f3 | 794 | { |
9845d120 | 795 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); |
796 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); | |
797 | get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags); | |
03c253f3 | 798 | } |
a002e999 | 799 | |
2be14d8b | 800 | return; |
801 | } | |
a002e999 | 802 | |
80f06481 | 803 | case WITH_SIZE_EXPR: |
454b4e1f | 804 | /* WITH_SIZE_EXPR is a pass-through reference to its first argument, |
80f06481 | 805 | and an rvalue reference to its second argument. */ |
9845d120 | 806 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); |
5b110d39 | 807 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
80f06481 | 808 | return; |
809 | ||
07c03fb0 | 810 | case COND_EXPR: |
bd2ec699 | 811 | case VEC_COND_EXPR: |
f4803722 | 812 | case VEC_PERM_EXPR: |
9845d120 | 813 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags); |
814 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); | |
815 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); | |
07c03fb0 | 816 | return; |
817 | ||
f9c6943b | 818 | case CONSTRUCTOR: |
819 | { | |
820 | /* General aggregate CONSTRUCTORs have been decomposed, but they | |
821 | are still in use as the COMPLEX_EXPR equivalent for vectors. */ | |
c75b4594 | 822 | constructor_elt *ce; |
823 | unsigned HOST_WIDE_INT idx; | |
f9c6943b | 824 | |
3c25489e | 825 | /* A volatile constructor is actually TREE_CLOBBER_P, transfer |
826 | the volatility to the statement, don't use TREE_CLOBBER_P for | |
827 | mirroring the other uses of THIS_VOLATILE in this file. */ | |
587838bb | 828 | if (!(flags & opf_no_vops) |
829 | && TREE_THIS_VOLATILE (expr)) | |
3c25489e | 830 | gimple_set_has_volatile_ops (stmt, true); |
831 | ||
c75b4594 | 832 | for (idx = 0; |
f1f41a6c | 833 | vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce); |
c75b4594 | 834 | idx++) |
9845d120 | 835 | get_expr_operands (stmt, &ce->value, uflags); |
f9c6943b | 836 | |
837 | return; | |
838 | } | |
839 | ||
c9a1e1e0 | 840 | case BIT_FIELD_REF: |
587838bb | 841 | if (!(flags & opf_no_vops) |
842 | && TREE_THIS_VOLATILE (expr)) | |
1e342984 | 843 | gimple_set_has_volatile_ops (stmt, true); |
844 | /* FALLTHRU */ | |
845 | ||
2c0bc8ce | 846 | case VIEW_CONVERT_EXPR: |
c9a1e1e0 | 847 | do_unary: |
5b110d39 | 848 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
4ee9c684 | 849 | return; |
4ee9c684 | 850 | |
c9a1e1e0 | 851 | case COMPOUND_EXPR: |
852 | case OBJ_TYPE_REF: | |
88dbf20f | 853 | case ASSERT_EXPR: |
c9a1e1e0 | 854 | do_binary: |
855 | { | |
5b110d39 | 856 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
857 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
c9a1e1e0 | 858 | return; |
859 | } | |
860 | ||
4a61a337 | 861 | case DOT_PROD_EXPR: |
b056d812 | 862 | case REALIGN_LOAD_EXPR: |
00f4f705 | 863 | case WIDEN_MULT_PLUS_EXPR: |
864 | case WIDEN_MULT_MINUS_EXPR: | |
156f51b9 | 865 | case FMA_EXPR: |
b056d812 | 866 | { |
867 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
00f4f705 | 868 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); |
869 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); | |
870 | return; | |
b056d812 | 871 | } |
872 | ||
c9a1e1e0 | 873 | case FUNCTION_DECL: |
c9a1e1e0 | 874 | case LABEL_DECL: |
bef99423 | 875 | case CONST_DECL: |
75a70cf9 | 876 | case CASE_LABEL_EXPR: |
fa999566 | 877 | /* Expressions that make no memory references. */ |
c9a1e1e0 | 878 | return; |
fa999566 | 879 | |
880 | default: | |
f0d6e81c | 881 | if (codeclass == tcc_unary) |
fa999566 | 882 | goto do_unary; |
f0d6e81c | 883 | if (codeclass == tcc_binary || codeclass == tcc_comparison) |
fa999566 | 884 | goto do_binary; |
f0d6e81c | 885 | if (codeclass == tcc_constant || codeclass == tcc_type) |
fa999566 | 886 | return; |
a002e999 | 887 | } |
c9a1e1e0 | 888 | |
fa999566 | 889 | /* If we get here, something has gone wrong. */ |
890 | #ifdef ENABLE_CHECKING | |
891 | fprintf (stderr, "unhandled expression in get_expr_operands():\n"); | |
892 | debug_tree (expr); | |
893 | fputs ("\n", stderr); | |
894 | #endif | |
895 | gcc_unreachable (); | |
c9a1e1e0 | 896 | } |
897 | ||
a002e999 | 898 | |
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.
   Dispatches on the statement code; most statements simply have all
   their operands scanned as uses, with the LHS of assignments and
   calls scanned as a definition.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      /* ASMs have their own operand layout (inputs/outputs/clobbers).  */
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      /* Only a debug bind with a value has operands; scan its value
	 with opf_no_vops so no virtual operands are created.  */
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return uses the memory state: append a VUSE of the single
	 virtual operand, then scan the remaining operands as uses.  */
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 is the LHS: scan it as a definition, and start the
	 generic use scan below at operand 1.  */
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      /* Scan all (remaining) operands as uses.  */
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
948 | ||
a002e999 | 949 | |
/* Create an operands cache for STMT by parsing its operands from
   scratch and finalizing them onto the statement.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands;
     the parse below re-sets the flag if any operand is volatile, so
     this must happen before parse_ssa_operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}
39b644e9 | 962 | |
/* Verifies SSA statement operands.  Rebuilds the operand caches for
   STMT (without finalizing them onto the statement) and compares the
   result against the operands currently recorded on STMT.  Returns
   true and emits an error on any mismatch, false if up-to-date.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* NOTE(review): the early-return error paths below skip
     cleanup_build_arrays, leaving the build vectors populated —
     confirm callers treat a 'true' result as fatal.  */

  /* Now verify the built operands are the same as present in STMT.
     Compare virtual defs/uses by underlying variable, since the
     builder records variables while the stmt carries SSA names.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  /* Match each use operand on the statement against the freshly built
     use vector, NULLing out entries as they are matched so leftovers
     can be detected below.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  /* Any non-NULL entry left was rebuilt but is absent from STMT.  */
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  /* The rebuild must reproduce the volatility flag saved on entry.  */
  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
1049 | ||
4ec25329 | 1050 | |
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      /* Delink every use operand from the immediate-use chain of the
	 SSA name it references; the loop handles all but the last
	 node, which is delinked separately and remembered so the
	 whole list can be spliced onto the per-function freelist in
	 one step.  */
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  /* Virtual operands are stored directly on the statement; just
     clear them.  */
  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
1075 | ||
0b3f639d | 1076 | |
/* Get the operands of statement STMT: rebuild its operand cache and
   clear its modified flag.  Requires STMT to be marked modified.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (cfun))
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
b0b70f22 | 1101 | |
f6255040 | 1102 | |
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  EXP0 and EXP1 must
   point at operand slots within STMT.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  gcc_checking_assert (ssa_operands_active (cfun));

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
1150 | ||
75a70cf9 | 1151 | |
/* Scan the immediate_use list for VAR making sure its linked properly.
   The list is a circular doubly-linked list rooted at VAR's
   SSA_NAME_IMM_USE_NODE; it is walked forward and then backward,
   checking prev/next consistency and that every node refers to VAR.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The root node is a sentinel; it never carries a use.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  /* An unlinked root (prev == NULL) means no uses; next must also
     be NULL then.  */
  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Forward walk: check back-links and that each node uses VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      /* The backward walk must visit exactly as many nodes as the
	 forward walk did.  */
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
1222 | ||
1223 | ||
1224 | /* Dump all the immediate uses to FILE. */ | |
1225 | ||
1226 | void | |
1227 | dump_immediate_uses_for (FILE *file, tree var) | |
1228 | { | |
1229 | imm_use_iterator iter; | |
1230 | use_operand_p use_p; | |
1231 | ||
1232 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
1233 | ||
1234 | print_generic_expr (file, var, TDF_SLIM); | |
1235 | fprintf (file, " : -->"); | |
1236 | if (has_zero_uses (var)) | |
1237 | fprintf (file, " no uses.\n"); | |
1238 | else | |
1239 | if (has_single_use (var)) | |
1240 | fprintf (file, " single use.\n"); | |
1241 | else | |
1242 | fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
1243 | ||
1244 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
1245 | { | |
75a70cf9 | 1246 | if (use_p->loc.stmt == NULL && use_p->use == NULL) |
66c8f3a9 | 1247 | fprintf (file, "***end of stmt iterator marker***\n"); |
b66731e8 | 1248 | else |
66c8f3a9 | 1249 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
75a70cf9 | 1250 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); |
66c8f3a9 | 1251 | else |
75a70cf9 | 1252 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); |
22aa74c4 | 1253 | } |
9af5ce0c | 1254 | fprintf (file, "\n"); |
22aa74c4 | 1255 | } |
1256 | ||
a002e999 | 1257 | |
22aa74c4 | 1258 | /* Dump all the immediate uses to FILE. */ |
1259 | ||
1260 | void | |
1261 | dump_immediate_uses (FILE *file) | |
1262 | { | |
1263 | tree var; | |
1264 | unsigned int x; | |
1265 | ||
1266 | fprintf (file, "Immediate_uses: \n\n"); | |
1267 | for (x = 1; x < num_ssa_names; x++) | |
1268 | { | |
9af5ce0c | 1269 | var = ssa_name (x); |
22aa74c4 | 1270 | if (!var) |
1271 | continue; | |
1272 | dump_immediate_uses_for (file, var); | |
1273 | } | |
1274 | } | |
1275 | ||
1276 | ||
/* Dump def-use edges for all SSA names on stderr.  Convenience
   wrapper around dump_immediate_uses for use from a debugger.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}
1284 | ||
f6255040 | 1285 | |
/* Dump def-use edges for SSA name VAR on stderr.  Convenience
   wrapper around dump_immediate_uses_for for use from a debugger.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
de6ed584 | 1293 | |
1294 | ||
/* Unlink STMT's virtual definition from the IL by propagating its use:
   every use of STMT's VDEF is replaced by STMT's VUSE.  No-op when
   STMT has no SSA-name VDEF.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  /* Rewrite all immediate uses of the VDEF to use the VUSE instead.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  /* The replacement name inherits the abnormal-PHI property.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
dd277d48 | 1319 | |
8f6fa493 | 1320 | |
1321 | /* Return true if the var whose chain of uses starts at PTR has no | |
1322 | nondebug uses. */ | |
1323 | bool | |
1324 | has_zero_uses_1 (const ssa_use_operand_t *head) | |
1325 | { | |
1326 | const ssa_use_operand_t *ptr; | |
1327 | ||
1328 | for (ptr = head->next; ptr != head; ptr = ptr->next) | |
1329 | if (!is_gimple_debug (USE_STMT (ptr))) | |
1330 | return false; | |
1331 | ||
1332 | return true; | |
1333 | } | |
1334 | ||
1335 | ||
1336 | /* Return true if the var whose chain of uses starts at PTR has a | |
1337 | single nondebug use. Set USE_P and STMT to that single nondebug | |
1338 | use, if so, or to NULL otherwise. */ | |
1339 | bool | |
1340 | single_imm_use_1 (const ssa_use_operand_t *head, | |
1341 | use_operand_p *use_p, gimple *stmt) | |
1342 | { | |
1343 | ssa_use_operand_t *ptr, *single_use = 0; | |
1344 | ||
1345 | for (ptr = head->next; ptr != head; ptr = ptr->next) | |
1346 | if (!is_gimple_debug (USE_STMT (ptr))) | |
1347 | { | |
1348 | if (single_use) | |
1349 | { | |
1350 | single_use = NULL; | |
1351 | break; | |
1352 | } | |
1353 | single_use = ptr; | |
1354 | } | |
1355 | ||
1356 | if (use_p) | |
1357 | *use_p = single_use; | |
1358 | ||
1359 | if (stmt) | |
1360 | *stmt = single_use ? single_use->loc.stmt : NULL; | |
1361 | ||
1362 | return single_use; | |
1363 | } |