]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* SSA operands management for trees. |
3aea1f79 | 2 | Copyright (C) 2003-2014 Free Software Foundation, Inc. |
4ee9c684 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 8 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
23 | #include "tm.h" | |
24 | #include "tree.h" | |
9ed99284 | 25 | #include "stmt.h" |
26 | #include "print-tree.h" | |
4ee9c684 | 27 | #include "flags.h" |
a3020f2f | 28 | #include "hashtab.h" |
29 | #include "hash-set.h" | |
30 | #include "vec.h" | |
31 | #include "machmode.h" | |
32 | #include "hard-reg-set.h" | |
33 | #include "input.h" | |
4ee9c684 | 34 | #include "function.h" |
ce084dfc | 35 | #include "gimple-pretty-print.h" |
073c1fd5 | 36 | #include "bitmap.h" |
94ea8568 | 37 | #include "predict.h" |
bc61cadb | 38 | #include "basic-block.h" |
39 | #include "tree-ssa-alias.h" | |
40 | #include "internal-fn.h" | |
41 | #include "gimple-expr.h" | |
42 | #include "is-a.h" | |
073c1fd5 | 43 | #include "gimple.h" |
44 | #include "gimple-ssa.h" | |
45 | #include "tree-phinodes.h" | |
46 | #include "ssa-iterators.h" | |
9ed99284 | 47 | #include "stringpool.h" |
073c1fd5 | 48 | #include "tree-ssanames.h" |
4ee9c684 | 49 | #include "tree-inline.h" |
b9ed1410 | 50 | #include "timevar.h" |
51 | #include "dumpfile.h" | |
4ee9c684 | 52 | #include "timevar.h" |
acc70efa | 53 | #include "langhooks.h" |
85f3d834 | 54 | #include "diagnostic-core.h" |
55 | ||
5b110d39 | 56 | |
48e1416a | 57 | /* This file contains the code required to manage the operands cache of the |
58 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt | |
59 | annotation. This cache contains operands that will be of interest to | |
60 | optimizers and other passes wishing to manipulate the IL. | |
5b110d39 | 61 | |
48e1416a | 62 | The operand type are broken up into REAL and VIRTUAL operands. The real |
63 | operands are represented as pointers into the stmt's operand tree. Thus | |
5b110d39 | 64 | any manipulation of the real operands will be reflected in the actual tree. |
48e1416a | 65 | Virtual operands are represented solely in the cache, although the base |
66 | variable for the SSA_NAME may, or may not occur in the stmt's tree. | |
5b110d39 | 67 | Manipulation of the virtual operands will not be reflected in the stmt tree. |
68 | ||
48e1416a | 69 | The routines in this file are concerned with creating this operand cache |
5b110d39 | 70 | from a stmt tree. |
71 | ||
48e1416a | 72 | The operand tree is the parsed by the various get_* routines which look |
73 | through the stmt tree for the occurrence of operands which may be of | |
74 | interest, and calls are made to the append_* routines whenever one is | |
75 | found. There are 4 of these routines, each representing one of the | |
4fb5e5ca | 76 | 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. |
5b110d39 | 77 | |
48e1416a | 78 | The append_* routines check for duplication, and simply keep a list of |
5b110d39 | 79 | unique objects for each operand type in the build_* extendable vectors. |
80 | ||
48e1416a | 81 | Once the stmt tree is completely parsed, the finalize_ssa_operands() |
82 | routine is called, which proceeds to perform the finalization routine | |
4fb5e5ca | 83 | on each of the 4 operand vectors which have been built up. |
5b110d39 | 84 | |
48e1416a | 85 | If the stmt had a previous operand cache, the finalization routines |
86 | attempt to match up the new operands with the old ones. If it's a perfect | |
87 | match, the old vector is simply reused. If it isn't a perfect match, then | |
88 | a new vector is created and the new operands are placed there. For | |
89 | virtual operands, if the previous cache had SSA_NAME version of a | |
90 | variable, and that same variable occurs in the same operands cache, then | |
5b110d39 | 91 | the new cache vector will also get the same SSA_NAME. |
92 | ||
4ec25329 | 93 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new |
94 | operand vector for VUSE, then the new vector will also be modified | |
95 | such that it contains 'a_5' rather than 'a'. */ | |
5b110d39 | 96 | |
4fb5e5ca | 97 | |
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* NOTE(review): bit (1 << 2) is not used by any flag visible in this
   file — presumably a historical flag was removed; confirm before
   reusing the bit.  */

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Array for building all the use operands.  Elements are really tree *
   pointers smuggled through a cast; see append_use/finalize_ssa_uses.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (struct function *, gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
5b110d39 | 141 | |
582791b0 | 142 | /* Accessor to tree-ssa-operands.c caches. */ |
143 | static inline struct ssa_operands * | |
144 | gimple_ssa_operands (const struct function *fun) | |
145 | { | |
146 | return &fun->gimple_df->ssa_operands; | |
147 | } | |
148 | ||
fa999566 | 149 | |
f6255040 | 150 | /* Return true if the SSA operands cache is active. */ |
5b110d39 | 151 | |
b66731e8 | 152 | bool |
8d672d12 | 153 | ssa_operands_active (struct function *fun) |
4ee9c684 | 154 | { |
8d672d12 | 155 | if (fun == NULL) |
75a70cf9 | 156 | return false; |
157 | ||
8d672d12 | 158 | return fun->gimple_df && gimple_ssa_operands (fun)->ops_active; |
b66731e8 | 159 | } |
4ee9c684 | 160 | |
48e1416a | 161 | |
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  /* Each function gets exactly one .MEM decl; creating it twice would
     corrupt the virtual operand web.  */
  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  /* Compiler-generated, invisible to the user and the debugger.  */
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  /* External + static so it is treated as a global with no definition
     to emit.  */
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
dadb7503 | 188 | |
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

/* Sizes are reduced by sizeof (void *) to leave room for the chunk's
   'next' chain pointer; see struct ssa_operand_memory_d.  */
#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
dadb7503 | 200 | |
/* Initialize the operand cache routines for function FN.  */

void
init_ssa_operands (struct function *fn)
{
  /* The build vectors and the bitmap obstack are shared by all
     functions; set them up only for the first initialization.  */
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  /* Setting the index to the (reset) chunk size forces the first
     allocation in ssa_operand_alloc to grab a fresh chunk.  */
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
4ee9c684 | 221 | |
5b110d39 | 222 | |
/* Dispose of anything required by the operand routines for function FN.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  /* Tear down the shared build state only when the last initialized
     function is being finalized.  */
  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  /* Free every chunk on FN's operand memory chain.  */
  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}
5b110d39 | 253 | |
4ee9c684 | 254 | |
/* Return memory for an operand of size SIZE from FN's operand memory,
   growing the chunk chain (with geometrically larger chunks, capped at
   OP_SIZE_3) when the current chunk is exhausted.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  /* Only use operands are carved from this allocator nowadays.  */
  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      /* NOTE: this inner 'ptr' deliberately shadows the outer char *ptr;
	 it names the freshly allocated chunk, not the returned slot.  */
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}


      /* Extra sizeof (void *) covers the chunk's 'next' pointer.  */
      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  /* Bump-allocate from the head chunk.  */
  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}
299 | ||
5b110d39 | 300 | |
dadb7503 | 301 | /* Allocate a USE operand. */ |
302 | ||
4fb5e5ca | 303 | static inline struct use_optype_d * |
861b4e39 | 304 | alloc_use (struct function *fn) |
4fb5e5ca | 305 | { |
306 | struct use_optype_d *ret; | |
861b4e39 | 307 | if (gimple_ssa_operands (fn)->free_uses) |
4fb5e5ca | 308 | { |
861b4e39 | 309 | ret = gimple_ssa_operands (fn)->free_uses; |
310 | gimple_ssa_operands (fn)->free_uses | |
311 | = gimple_ssa_operands (fn)->free_uses->next; | |
4fb5e5ca | 312 | } |
313 | else | |
dadb7503 | 314 | ret = (struct use_optype_d *) |
861b4e39 | 315 | ssa_operand_alloc (fn, sizeof (struct use_optype_d)); |
4fb5e5ca | 316 | return ret; |
317 | } | |
318 | ||
319 | ||
dadb7503 | 320 | /* Adds OP to the list of uses of statement STMT after LAST. */ |
b5b59dda | 321 | |
4fb5e5ca | 322 | static inline use_optype_p |
861b4e39 | 323 | add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last) |
b5b59dda | 324 | { |
f0d6e81c | 325 | use_optype_p new_use; |
326 | ||
861b4e39 | 327 | new_use = alloc_use (fn); |
f0d6e81c | 328 | USE_OP_PTR (new_use)->use = op; |
329 | link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); | |
330 | last->next = new_use; | |
331 | new_use->next = NULL; | |
332 | return new_use; | |
b5b59dda | 333 | } |
334 | ||
b5b59dda | 335 | |
b5b59dda | 336 | |
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the underlying variable, not the SSA version,
	 so an already-renamed VDEF of the same symbol is kept.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  /* Re-route uses of the old VDEF and recycle its SSA name.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
b66731e8 | 374 | |
4ee9c684 | 375 | |
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      /* Stash the address of the stmt's VUSE slot at the front of the
	 build array (cast through tree; cast back below).  */
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Delink every node from its immediate-use chain, then donate the
	 whole old list to the free list in one splice.  */
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
5b110d39 | 439 | |
4fb5e5ca | 440 | |
441 | /* Clear the in_list bits and empty the build array for VDEFs and | |
442 | VUSEs. */ | |
b5b59dda | 443 | |
444 | static inline void | |
4fb5e5ca | 445 | cleanup_build_arrays (void) |
b5b59dda | 446 | { |
dd277d48 | 447 | build_vdef = NULL_TREE; |
448 | build_vuse = NULL_TREE; | |
f1f41a6c | 449 | build_uses.truncate (0); |
2cf24776 | 450 | } |
451 | ||
4ee9c684 | 452 | |
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
{
  /* Defs first: finalize_ssa_uses consumes build_vuse/build_vdef too.  */
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  /* Leave the build state empty for the next statement.  */
  cleanup_build_arrays ();
}
462 | ||
463 | ||
5b110d39 | 464 | /* Start the process of building up operands vectors in INFO. */ |
465 | ||
466 | static inline void | |
467 | start_ssa_stmt_operands (void) | |
4ee9c684 | 468 | { |
f1f41a6c | 469 | gcc_assert (build_uses.length () == 0); |
dd277d48 | 470 | gcc_assert (build_vuse == NULL_TREE); |
471 | gcc_assert (build_vdef == NULL_TREE); | |
4ee9c684 | 472 | } |
473 | ||
474 | ||
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  /* build_uses is a vec<tree>, so the tree * is smuggled in via a cast;
     finalize_ssa_uses casts it back.  */
  build_uses.safe_push ((tree) use_p);
}
482 | ||
483 | ||
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  /* At most one VDEF/VUSE per statement; re-appending must agree with
     what is already recorded.  */
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  /* A VDEF implies a VUSE of the same variable (read-modify-write of
     the virtual operand).  */
  build_vdef = var;
  build_vuse = var;
}
497 | ||
498 | ||
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  /* Only one VUSE per statement; must agree with any prior append.  */
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}
509 | ||
/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
		     gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  /* Debug stmts never get virtual operands.  */
  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}
529 | ||
b66731e8 | 530 | |
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	;	/* Real defs live in the stmt tree itself; nothing to
		   record here.  */
      else
	append_use (var_p);
      /* A bare decl (not yet an SSA_NAME) needs the renamer run.  */
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}
b66731e8 | 564 | |
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      /* For a MEM_REF whose base is &decl, mark the underlying decl —
	 the address escapes through the dereference.  */
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
591 | ||
4ec25329 | 592 | |
/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  Dereferencing
     the pointer is a use, never a def, and taking its address inside the
     ref does not make the pointed-to object addressable.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
a002e999 | 618 | |
4ec25329 | 619 | |
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands: the base and both index trees are
     plain uses.  Only opf_no_vops is propagated from FLAGS.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  /* The memory access itself gets the virtual operand (def or use per
     FLAGS).  */
  add_virtual_operand (fn, stmt, flags);
}
639 | ||
640 | ||
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (struct function *fn, gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	/* Neither pure nor const: the call may write memory.  */
	add_virtual_operand (fn, stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	/* Pure but not const: the call may read memory.  */
	add_virtual_operand (fn, stmt, opf_use);
      /* Const calls get no virtual operands at all.  */
    }
}
661 | ||
662 | ||
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      /* Output constraints are remembered so input parsing can resolve
	 matching ("0", "1", ...) constraints against them.  */
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}
717 | ||
718 | ||
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  Real operands are appended to the
   build arrays; memory references additionally set up virtual
   operands via the get_*_operands helpers.  */

static void
get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  /* Flags used when recursing into rvalue-only subexpressions such as
     array indices: those are always plain uses.  */
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* Debug statements never create virtual operands; propagate the
     opf_no_vops bit into the use flags as well.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* A bare decl or SSA name is a real operand unless we only got
	 here because its address was taken.  */
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      /* DEBUG_EXPR_DECLs may only appear as the value of a debug
	 bind; they carry no operands.  */
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	/* Scan the base of the reference with the caller's flags.  */
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    /* Volatility of the referenced FIELD_DECL also marks the
	       statement; operand 2 is the field offset, a plain use.  */
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    /* Index, lower bound and element size are rvalue uses.  */
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      /* All three operands of these ternaries are rvalue uses.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	/* Ternary operations: all three operands keep the incoming
	   flags.  */
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
906 | ||
a002e999 | 907 | |
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.
   Dispatches on the GIMPLE statement code; most codes fall through
   to scanning all statement operands as uses.  */

static void
parse_ssa_operands (struct function *fn, gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  /* START is the first operand index scanned as a use; assigns and
     calls treat operand 0 as the definition instead.  */
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      /* Only bound debug binds have a value to scan, and never any
	 virtual operands.  */
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return uses the virtual operand of the function.  */
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 is the LHS definition; the rest are uses.  */
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
957 | ||
a002e999 | 958 | |
/* Create an operands cache for STMT.  Clears the volatile flag,
   rescans the statement and installs the finalized operand lists;
   the start/parse/finalize sequence must run in this order since
   the build arrays are global scratch state.  */

static void
build_ssa_operands (struct function *fn, gimple stmt)
{
  /* Initially assume that the statement has no volatile operands;
     parse_ssa_operands re-sets the flag if it finds any.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}
39b644e9 | 971 | |
/* Verifies SSA statement operands.  Rebuilds the operands of STMT
   without committing them and compares the result against the
   operands currently cached on STMT.  Returns true (and emits an
   error) on any mismatch, false if everything is up to date.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  /* Remember the cached volatile flag; rebuilding clobbers it.  */
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  /* Match each cached use against the rebuilt list, clearing matched
     entries so leftovers indicate missing operands.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  /* Any entry not cleared above was built but is not cached.  */
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
1058 | ||
4ec25329 | 1059 | |
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      /* Delink every cached use from its SSA name's immediate-use
	 list; the loop stops at the last node so LAST_USE can be
	 used to splice the chain below.  */
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      /* Return the whole chain to the per-function free list.  */
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  /* Virtual operands are stored directly on the statement; just
     clear them.  */
  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
1084 | ||
0b3f639d | 1085 | |
/* Get the operands of statement STMT.  Rebuilds the operand caches
   for a statement previously marked modified and clears the
   modified flag.  */

void
update_stmt_operands (struct function *fn, gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  /* Callers must only request an update for statements flagged as
     modified.  */
  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
b0b70f22 | 1104 | |
f6255040 | 1105 | |
fa999566 | 1106 | /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done |
1107 | to test the validity of the swap operation. */ | |
b0b70f22 | 1108 | |
fa999566 | 1109 | void |
8f6fa493 | 1110 | swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1) |
fa999566 | 1111 | { |
1112 | tree op0, op1; | |
1113 | op0 = *exp0; | |
1114 | op1 = *exp1; | |
0b3f639d | 1115 | |
8f6fa493 | 1116 | if (op0 != op1) |
fa999566 | 1117 | { |
8f6fa493 | 1118 | /* Attempt to preserve the relative positions of these two operands in |
1119 | their * respective immediate use lists by adjusting their use pointer | |
1120 | to point to the new operand position. */ | |
fa999566 | 1121 | use_optype_p use0, use1, ptr; |
1122 | use0 = use1 = NULL; | |
0b3f639d | 1123 | |
fa999566 | 1124 | /* Find the 2 operands in the cache, if they are there. */ |
75a70cf9 | 1125 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
fa999566 | 1126 | if (USE_OP_PTR (ptr)->use == exp0) |
1127 | { | |
1128 | use0 = ptr; | |
1129 | break; | |
1130 | } | |
0b3f639d | 1131 | |
75a70cf9 | 1132 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
fa999566 | 1133 | if (USE_OP_PTR (ptr)->use == exp1) |
1134 | { | |
1135 | use1 = ptr; | |
1136 | break; | |
1137 | } | |
1138 | ||
f3f02af0 | 1139 | /* And adjust their location to point to the new position of the |
1140 | operand. */ | |
1141 | if (use0) | |
1142 | USE_OP_PTR (use0)->use = exp1; | |
1143 | if (use1) | |
1144 | USE_OP_PTR (use1)->use = exp0; | |
fa999566 | 1145 | |
8f6fa493 | 1146 | /* Now swap the data. */ |
1147 | *exp0 = op1; | |
1148 | *exp1 = op0; | |
1149 | } | |
0b3f639d | 1150 | } |
1151 | ||
75a70cf9 | 1152 | |
/* Scan the immediate_use list for VAR making sure its linked properly.
   Return TRUE if there is a problem and emit an error message to F.
   The list is a circular doubly-linked list rooted at VAR's
   SSA_NAME_IMM_USE_NODE; it is walked in both directions and the
   node counts must agree.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The root node carries no use of its own.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked root must be unlinked in both directions.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward, checking back-links and that every node really
     uses VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      /* The backward walk must not visit more nodes than the
	 forward one did.  */
      if (count-- < 0)
	goto error;
    }

  /* Both directions must have seen exactly the same number of
     nodes.  */
  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
1223 | ||
1224 | ||
1225 | /* Dump all the immediate uses to FILE. */ | |
1226 | ||
1227 | void | |
1228 | dump_immediate_uses_for (FILE *file, tree var) | |
1229 | { | |
1230 | imm_use_iterator iter; | |
1231 | use_operand_p use_p; | |
1232 | ||
1233 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
1234 | ||
1235 | print_generic_expr (file, var, TDF_SLIM); | |
1236 | fprintf (file, " : -->"); | |
1237 | if (has_zero_uses (var)) | |
1238 | fprintf (file, " no uses.\n"); | |
1239 | else | |
1240 | if (has_single_use (var)) | |
1241 | fprintf (file, " single use.\n"); | |
1242 | else | |
1243 | fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
1244 | ||
1245 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
1246 | { | |
75a70cf9 | 1247 | if (use_p->loc.stmt == NULL && use_p->use == NULL) |
66c8f3a9 | 1248 | fprintf (file, "***end of stmt iterator marker***\n"); |
b66731e8 | 1249 | else |
66c8f3a9 | 1250 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
75a70cf9 | 1251 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); |
66c8f3a9 | 1252 | else |
75a70cf9 | 1253 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); |
22aa74c4 | 1254 | } |
9af5ce0c | 1255 | fprintf (file, "\n"); |
22aa74c4 | 1256 | } |
1257 | ||
a002e999 | 1258 | |
22aa74c4 | 1259 | /* Dump all the immediate uses to FILE. */ |
1260 | ||
1261 | void | |
1262 | dump_immediate_uses (FILE *file) | |
1263 | { | |
1264 | tree var; | |
1265 | unsigned int x; | |
1266 | ||
1267 | fprintf (file, "Immediate_uses: \n\n"); | |
1268 | for (x = 1; x < num_ssa_names; x++) | |
1269 | { | |
9af5ce0c | 1270 | var = ssa_name (x); |
22aa74c4 | 1271 | if (!var) |
1272 | continue; | |
1273 | dump_immediate_uses_for (file, var); | |
1274 | } | |
1275 | } | |
1276 | ||
1277 | ||
/* Dump def-use edges for all SSA names on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}
1285 | ||
f6255040 | 1286 | |
/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
de6ed584 | 1294 | |
1295 | ||
/* Unlink STMTs virtual definition from the IL by propagating its use.
   Every use of STMT's VDEF is rewritten to STMT's VUSE, removing
   STMT from the virtual use-def chain.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* Nothing to do if STMT has no SSA virtual definition.  */
  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  /* Preserve the abnormal-PHI marker on the replacement name.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
dd277d48 | 1320 | |
8f6fa493 | 1321 | |
1322 | /* Return true if the var whose chain of uses starts at PTR has no | |
1323 | nondebug uses. */ | |
1324 | bool | |
1325 | has_zero_uses_1 (const ssa_use_operand_t *head) | |
1326 | { | |
1327 | const ssa_use_operand_t *ptr; | |
1328 | ||
1329 | for (ptr = head->next; ptr != head; ptr = ptr->next) | |
1330 | if (!is_gimple_debug (USE_STMT (ptr))) | |
1331 | return false; | |
1332 | ||
1333 | return true; | |
1334 | } | |
1335 | ||
1336 | ||
1337 | /* Return true if the var whose chain of uses starts at PTR has a | |
1338 | single nondebug use. Set USE_P and STMT to that single nondebug | |
1339 | use, if so, or to NULL otherwise. */ | |
1340 | bool | |
1341 | single_imm_use_1 (const ssa_use_operand_t *head, | |
1342 | use_operand_p *use_p, gimple *stmt) | |
1343 | { | |
1344 | ssa_use_operand_t *ptr, *single_use = 0; | |
1345 | ||
1346 | for (ptr = head->next; ptr != head; ptr = ptr->next) | |
1347 | if (!is_gimple_debug (USE_STMT (ptr))) | |
1348 | { | |
1349 | if (single_use) | |
1350 | { | |
1351 | single_use = NULL; | |
1352 | break; | |
1353 | } | |
1354 | single_use = ptr; | |
1355 | } | |
1356 | ||
1357 | if (use_p) | |
1358 | *use_p = single_use; | |
1359 | ||
1360 | if (stmt) | |
1361 | *stmt = single_use ? single_use->loc.stmt : NULL; | |
1362 | ||
1363 | return single_use; | |
1364 | } |