]>
Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* SSA operands management for trees. |
85ec4feb | 2 | Copyright (C) 2003-2018 Free Software Foundation, Inc. |
6de9cd9a DN |
3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 8 | the Free Software Foundation; either version 3, or (at your option) |
6de9cd9a DN |
9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
c7131fb2 | 23 | #include "backend.h" |
6de9cd9a | 24 | #include "tree.h" |
c7131fb2 | 25 | #include "gimple.h" |
957060b5 | 26 | #include "timevar.h" |
c7131fb2 | 27 | #include "ssa.h" |
957060b5 AM |
28 | #include "gimple-pretty-print.h" |
29 | #include "diagnostic-core.h" | |
d8a2d370 DN |
30 | #include "stmt.h" |
31 | #include "print-tree.h" | |
7ee2468b | 32 | #include "dumpfile.h" |
bc590dfb | 33 | |
1a24f92f | 34 | |
b8698a0f L |
35 | /* This file contains the code required to manage the operands cache of the |
36 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt | |
37 | annotation. This cache contains operands that will be of interest to | |
38 | optimizers and other passes wishing to manipulate the IL. | |
1a24f92f | 39 | |
b8698a0f L |
40 | The operand types are broken up into REAL and VIRTUAL operands. The real
41 | operands are represented as pointers into the stmt's operand tree. Thus | |
1a24f92f | 42 | any manipulation of the real operands will be reflected in the actual tree. |
b8698a0f L |
43 | Virtual operands are represented solely in the cache, although the base |
44 | variable for the SSA_NAME may, or may not occur in the stmt's tree. | |
1a24f92f AM |
45 | Manipulation of the virtual operands will not be reflected in the stmt tree. |
46 | ||
b8698a0f | 47 | The routines in this file are concerned with creating this operand cache |
1a24f92f AM |
48 | from a stmt tree. |
49 | ||
b8698a0f L |
50 | The operand tree is then parsed by the various get_* routines which look
51 | through the stmt tree for the occurrence of operands which may be of | |
52 | interest, and calls are made to the append_* routines whenever one is | |
53 | found. There are 4 of these routines, each representing one of the | |
38635499 | 54 | 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. |
1a24f92f | 55 | |
b8698a0f | 56 | The append_* routines check for duplication, and simply keep a list of |
1a24f92f AM |
57 | unique objects for each operand type in the build_* extendable vectors. |
58 | ||
b8698a0f L |
59 | Once the stmt tree is completely parsed, the finalize_ssa_operands() |
60 | routine is called, which proceeds to perform the finalization routine | |
38635499 | 61 | on each of the 4 operand vectors which have been built up. |
1a24f92f | 62 | |
b8698a0f L |
63 | If the stmt had a previous operand cache, the finalization routines |
64 | attempt to match up the new operands with the old ones. If it's a perfect | |
65 | match, the old vector is simply reused. If it isn't a perfect match, then | |
66 | a new vector is created and the new operands are placed there. For | |
67 | virtual operands, if the previous cache had SSA_NAME version of a | |
68 | variable, and that same variable occurs in the same operands cache, then | |
1a24f92f AM |
69 | the new cache vector will also get the same SSA_NAME. |
70 | ||
28f6b1e4 DN |
71 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new |
72 | operand vector for VUSE, then the new vector will also be modified | |
73 | such that it contains 'a_5' rather than 'a'. */ | |
1a24f92f | 74 | |
38635499 | 75 | |
1e6a5d3c | 76 | /* Flags to describe operand properties in helpers. */ |
6de9cd9a DN |
77 | |
78 | /* By default, operands are loaded. */ | |
38635499 | 79 | #define opf_use 0 |
6de9cd9a | 80 | |
b8698a0f | 81 | /* Operand is the target of an assignment expression or a |
65ad7c63 | 82 | call-clobbered variable. */ |
38635499 | 83 | #define opf_def (1 << 0) |
a32b97a2 | 84 | |
6de9cd9a DN |
85 | /* No virtual operands should be created in the expression. This is used |
86 | when traversing ADDR_EXPR nodes which have different semantics than | |
87 | other expressions. Inside an ADDR_EXPR node, the only operands that we | |
88 | need to consider are indices into arrays. For instance, &a.b[i] should | |
89 | generate a USE of 'i' but it should not generate a VUSE for 'a' nor a | |
90 | VUSE for 'b'. */ | |
38635499 | 91 | #define opf_no_vops (1 << 1) |
6de9cd9a | 92 | |
70f34814 RG |
93 | /* Operand is in a place where address-taken does not imply addressable. */ |
94 | #define opf_non_addressable (1 << 3) | |
95 | ||
96 | /* Operand is in a place where opf_non_addressable does not apply. */ | |
97 | #define opf_not_non_addressable (1 << 4) | |
98 | ||
75fcf287 RB |
99 | /* Operand is having its address taken. */ |
100 | #define opf_address_taken (1 << 5) | |
101 | ||
6de9cd9a | 102 | /* Array for building all the use operands. */ |
572d790a | 103 | static vec<tree *> build_uses; |
6de9cd9a | 104 | |
5006671f RG |
105 | /* The built VDEF operand. */ |
106 | static tree build_vdef; | |
6de9cd9a | 107 | |
5006671f RG |
108 | /* The built VUSE operand. */ |
109 | static tree build_vuse; | |
6de9cd9a | 110 | |
b8698a0f | 111 | /* Bitmap obstack for our datastructures that needs to survive across |
04b5b56c | 112 | compilations of multiple functions. */ |
497f1b81 | 113 | static bitmap_obstack operands_bitmap_obstack; |
6e7e772d | 114 | |
355fe088 | 115 | static void get_expr_operands (struct function *, gimple *, tree *, int); |
02075bb2 | 116 | |
456cde30 JH |
117 | /* Number of functions with initialized ssa_operands. */ |
118 | static int n_initialized = 0; | |
1a24f92f | 119 | |
cc524fc7 AM |
120 | /* Accessor to tree-ssa-operands.c caches. */ |
121 | static inline struct ssa_operands * | |
122 | gimple_ssa_operands (const struct function *fun) | |
123 | { | |
124 | return &fun->gimple_df->ssa_operands; | |
125 | } | |
126 | ||
02075bb2 | 127 | |
65ad7c63 | 128 | /* Return true if the SSA operands cache is active. */ |
1a24f92f | 129 | |
f47c96aa | 130 | bool |
2eb712b4 | 131 | ssa_operands_active (struct function *fun) |
6de9cd9a | 132 | { |
2eb712b4 | 133 | if (fun == NULL) |
726a989a RB |
134 | return false; |
135 | ||
2eb712b4 | 136 | return fun->gimple_df && gimple_ssa_operands (fun)->ops_active; |
f47c96aa | 137 | } |
6de9cd9a | 138 | |
b8698a0f | 139 | |
5006671f RG |
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  /* Each function gets exactly one .MEM decl; it must not exist yet.  */
  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  /* Keep .MEM out of debug information.  */
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
79f99d42 | 166 | |
5006671f RG |
167 | /* These are the sizes of the operand memory buffer in bytes which gets |
168 | allocated each time more operands space is required. The final value is | |
169 | the amount that is allocated every time after that. | |
170 | In 1k we can fit 25 use operands (or 63 def operands) on a host with | |
171 | 8 byte pointers, that would be 10 statements each with 1 def and 2 | |
172 | uses. */ | |
b8698a0f | 173 | |
79f99d42 | 174 | #define OP_SIZE_INIT 0 |
5006671f RG |
175 | #define OP_SIZE_1 (1024 - sizeof (void *)) |
176 | #define OP_SIZE_2 (1024 * 4 - sizeof (void *)) | |
177 | #define OP_SIZE_3 (1024 * 16 - sizeof (void *)) | |
79f99d42 | 178 | |
f47c96aa AM |
/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  /* The build_* scratch vectors and the bitmap obstack are shared by
     all functions; set them up only when the first function arrives.  */
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  /* Setting the index equal to the current chunk size forces the very
     first allocation request to grab a fresh chunk — see
     ssa_operand_alloc.  */
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
6de9cd9a | 199 | |
1a24f92f | 200 | |
f47c96aa AM |
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  /* Tear down the shared build state only when the last initialized
     function is being finalized.  */
  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  /* The free list points into the chunks freed below; drop it first.  */
  gimple_ssa_operands (fn)->free_uses = NULL;

  /* Return every operand memory chunk to the GC.  */
  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  /* The obstack is shared; release it only after the last user.  */
  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}
1a24f92f | 231 | |
6de9cd9a | 232 | |
/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  /* Only USE operands are carved out of this allocator nowadays.  */
  gcc_assert (size == sizeof (struct use_optype_d));

  /* Not enough room left in the current chunk: start a new one.  */
  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      /* NB: intentionally shadows the outer PTR.  */
      struct ssa_operand_memory_d *ptr;

      /* Grow the chunk size geometrically, capped at OP_SIZE_3.  */
      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* sizeof (void *) covers the chunk's NEXT link preceding MEM.  */
      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      /* Push the new chunk on the per-function chunk list.  */
      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  /* Bump-allocate from the head chunk.  */
  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}
277 | ||
1a24f92f | 278 | |
79f99d42 AM |
279 | /* Allocate a USE operand. */ |
280 | ||
38635499 | 281 | static inline struct use_optype_d * |
6a58ccca | 282 | alloc_use (struct function *fn) |
38635499 DN |
283 | { |
284 | struct use_optype_d *ret; | |
6a58ccca | 285 | if (gimple_ssa_operands (fn)->free_uses) |
38635499 | 286 | { |
6a58ccca RB |
287 | ret = gimple_ssa_operands (fn)->free_uses; |
288 | gimple_ssa_operands (fn)->free_uses | |
289 | = gimple_ssa_operands (fn)->free_uses->next; | |
38635499 DN |
290 | } |
291 | else | |
79f99d42 | 292 | ret = (struct use_optype_d *) |
6a58ccca | 293 | ssa_operand_alloc (fn, sizeof (struct use_optype_d)); |
38635499 DN |
294 | return ret; |
295 | } | |
296 | ||
297 | ||
79f99d42 | 298 | /* Adds OP to the list of uses of statement STMT after LAST. */ |
ac574e1b | 299 | |
38635499 | 300 | static inline use_optype_p |
355fe088 | 301 | add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last) |
ac574e1b | 302 | { |
c22940cd TN |
303 | use_optype_p new_use; |
304 | ||
6a58ccca | 305 | new_use = alloc_use (fn); |
c22940cd TN |
306 | USE_OP_PTR (new_use)->use = op; |
307 | link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); | |
308 | last->next = new_use; | |
309 | new_use->next = NULL; | |
310 | return new_use; | |
ac574e1b ZD |
311 | } |
312 | ||
ac574e1b | 313 | |
ac574e1b | 314 | |
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple *stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the underlying variable so an existing
	 SSA_NAME vdef of the same base is left in place.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  /* Unlink uses of the name before releasing it back to the
	     SSA name pool.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
f47c96aa | 352 | |
6de9cd9a | 353 | |
/* Takes elements from build_uses and turns them into use operands of STMT.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple *stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      /* NOTE(review): the ternary is redundant inside this branch —
	 build_vuse is known non-NULL here, so this compares against
	 build_vuse; kept as-is.  */
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      /* The VUSE slot becomes the first entry of the use list.  */
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Delink every node from its immediate-use chain, then donate
	 the whole list to the free list in one splice.  */
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
1a24f92f | 416 | |
38635499 DN |
417 | |
418 | /* Clear the in_list bits and empty the build array for VDEFs and | |
419 | VUSEs. */ | |
ac574e1b ZD |
420 | |
421 | static inline void | |
38635499 | 422 | cleanup_build_arrays (void) |
ac574e1b | 423 | { |
5006671f RG |
424 | build_vdef = NULL_TREE; |
425 | build_vuse = NULL_TREE; | |
9771b263 | 426 | build_uses.truncate (0); |
a32b97a2 BB |
427 | } |
428 | ||
6de9cd9a | 429 | |
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
{
  finalize_ssa_defs (fn, stmt);
  /* Must follow finalize_ssa_defs: the use finalization reads
     build_vdef/build_vuse set up during def processing.  */
  finalize_ssa_uses (fn, stmt);
  /* Reset the build_* scratch state for the next statement.  */
  cleanup_build_arrays ();
}
439 | ||
440 | ||
1a24f92f AM |
/* Start the process of building up operands vectors in INFO.
   Verifies the previous statement's build state was fully consumed.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}
450 | ||
451 | ||
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push (use_p);
}
459 | ||
460 | ||
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  /* There is at most one virtual operand per statement; it must not
     already be set to a different variable.  */
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  /* A VDEF implies a VUSE of the same variable.  */
  build_vdef = var;
  build_vuse = var;
}
474 | ||
475 | ||
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  /* At most one virtual operand per statement.  */
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}
486 | ||
5006671f | 487 | /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ |
03c4c2e0 | 488 | |
5006671f | 489 | static void |
6a58ccca | 490 | add_virtual_operand (struct function *fn, |
355fe088 | 491 | gimple *stmt ATTRIBUTE_UNUSED, int flags) |
5006671f RG |
492 | { |
493 | /* Add virtual operands to the stmt, unless the caller has specifically | |
494 | requested not to do that (used when adding operands inside an | |
495 | ADDR_EXPR expression). */ | |
496 | if (flags & opf_no_vops) | |
497 | return; | |
498 | ||
b5b8b0ac AO |
499 | gcc_assert (!is_gimple_debug (stmt)); |
500 | ||
5006671f | 501 | if (flags & opf_def) |
6a58ccca | 502 | append_vdef (gimple_vop (fn)); |
5006671f | 503 | else |
6a58ccca | 504 | append_vuse (gimple_vop (fn)); |
f47c96aa AM |
505 | } |
506 | ||
f47c96aa | 507 | |
726a989a RB |
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p) || TREE_CODE (*var_p) == STRING_CST);

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	;  /* Deliberately empty: no build list entry is made for real
	      defs here.  */
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}
f47c96aa | 541 | |
ccacdf06 RG |
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	/* A MEM_REF whose base is &decl: mark the underlying decl.  */
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
568 | ||
28f6b1e4 | 569 | |
/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple *stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
643519b7 | 595 | |
28f6b1e4 | 596 | |
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands: base pointer and both indices
     are rvalue uses.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  /* The memory access itself gets the virtual operand.  */
  add_virtual_operand (fn, stmt, flags);
}
616 | ||
617 | ||
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (struct function *fn, gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 Neither pure nor const: may write memory -> VDEF.
	 Pure (but not const): may read memory -> VUSE.
	 Const: touches no memory -> nothing.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (fn, stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (fn, stmt, opf_use);
    }
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  /* Output constraints are collected so input-constraint parsing can
     resolve matching ("0", "1", ...) references.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}
694 | ||
695 | ||
02075bb2 | 696 | /* Recursively scan the expression pointed to by EXPR_P in statement |
65ad7c63 DN |
697 | STMT. FLAGS is one of the OPF_* constants modifying how to |
698 | interpret the operands found. */ | |
02075bb2 DN |
699 | |
700 | static void | |
get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  /* Sub-expressions reached from here that are plain rvalue reads are
     scanned with UFLAGS rather than FLAGS.  */
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* For debug stmts, propagate the no-vops request (if any) to the
     operands we recurse into; debug uses must never create VOPs.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case STRING_CST:
      /* A bare name is a real operand, unless we got here from an
	 ADDR_EXPR above, in which case it is not an access.  */
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* A volatile reference makes the whole statement volatile,
	   unless VOPs were suppressed (e.g. under an ADDR_EXPR).  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	/* The base object keeps the caller's def/use flags.  */
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    /* Index, lower bound and element size are plain uses.  */
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case BIT_INSERT_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
  if (flag_checking)
    {
      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
      debug_tree (expr);
      fputs ("\n", stderr);
      gcc_unreachable ();
    }
}
885 | ||
643519b7 | 886 | |
65ad7c63 DN |
887 | /* Parse STMT looking for operands. When finished, the various |
888 | build_* operand vectors will have potential operands in them. */ | |
889 | ||
static void
parse_ssa_operands (struct function *fn, gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      /* Only a debug bind with a value has anything to scan, and its
	 value must not generate virtual operands.  */
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return uses the virtual operand, then scans its operands
	 as plain uses via the default case.  */
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 is the LHS: scan it as a definition, then fall
	 through so the remaining operands are scanned as uses.  */
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
936 | ||
643519b7 | 937 | |
02075bb2 | 938 | /* Create an operands cache for STMT. */ |
310de761 RH |
939 | |
static void
build_ssa_operands (struct function *fn, gimple *stmt)
{
  /* Initially assume that the statement has no volatile operands;
     parse_ssa_operands sets the flag again if it finds any.  */
  gimple_set_has_volatile_ops (stmt, false);

  /* Collect operands into the build arrays, then commit them to the
     statement's operand cache.  The three calls must stay in this
     order.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}
e288e2f5 | 950 | |
bc590dfb RG |
951 | /* Verifies SSA statement operands. */ |
952 | ||
DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple *stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  The freshly parsed
     operands land in the build_* arrays for comparison below.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.
     Note: every error path returns true without calling
     cleanup_build_arrays; this is a checking-only function.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);	/* build_vdef holds the base variable.  */
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  tree use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  /* Match every use operand on STMT against the build_uses array,
     clearing matched entries so leftovers can be detected below.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree *op;
      FOR_EACH_VEC_ELT (build_uses, i, op)
	{
	  if (use_p->use == op)
	    {
	      build_uses[i] = NULL;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  /* Any non-NULL entry left in build_uses was never found on STMT.  */
  tree *op;
  FOR_EACH_VEC_ELT (build_uses, i, op)
    if (op != NULL)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*op);
	return true;
      }

  /* Re-parsing must reach the same volatility verdict as the cache.  */
  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
1040 | ||
28f6b1e4 | 1041 | |
5f40b3cb ZD |
1042 | /* Releases the operands of STMT back to their freelists, and clears |
1043 | the stmt operand lists. */ | |
1044 | ||
void
free_stmt_operands (struct function *fn, gimple *stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      /* Delink each use from its SSA name's immediate-use chain.  The
	 loop stops on the final node (its ->next is NULL), which is
	 then delinked by the trailing call; afterwards LAST_USE still
	 points at the tail of the chain.  */
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      /* Splice the whole chain onto the per-function free list for
	 reuse, and clear the statement's use-operand list.  */
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  /* Virtual operands are stored inline on the statement; just clear
     them.  */
  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
1066 | ||
3c0b6c43 | 1067 | |
2434ab1d | 1068 | /* Get the operands of statement STMT. */ |
643519b7 | 1069 | |
void
update_stmt_operands (struct function *fn, gimple *stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  /* Callers must have marked the statement modified before requesting
     an operand rescan.  */
  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
faf7c678 | 1086 | |
65ad7c63 | 1087 | |
02075bb2 DN |
1088 | /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done |
1089 | to test the validity of the swap operation. */ | |
faf7c678 | 1090 | |
02075bb2 | 1091 | void |
355fe088 | 1092 | swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1) |
02075bb2 DN |
1093 | { |
1094 | tree op0, op1; | |
1095 | op0 = *exp0; | |
1096 | op1 = *exp1; | |
3c0b6c43 | 1097 | |
80560f95 | 1098 | if (op0 != op1) |
02075bb2 | 1099 | { |
80560f95 AM |
1100 | /* Attempt to preserve the relative positions of these two operands in |
1101 | their * respective immediate use lists by adjusting their use pointer | |
1102 | to point to the new operand position. */ | |
02075bb2 DN |
1103 | use_optype_p use0, use1, ptr; |
1104 | use0 = use1 = NULL; | |
3c0b6c43 | 1105 | |
02075bb2 | 1106 | /* Find the 2 operands in the cache, if they are there. */ |
726a989a | 1107 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
02075bb2 DN |
1108 | if (USE_OP_PTR (ptr)->use == exp0) |
1109 | { | |
1110 | use0 = ptr; | |
1111 | break; | |
1112 | } | |
3c0b6c43 | 1113 | |
726a989a | 1114 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
02075bb2 DN |
1115 | if (USE_OP_PTR (ptr)->use == exp1) |
1116 | { | |
1117 | use1 = ptr; | |
1118 | break; | |
1119 | } | |
1120 | ||
64d9cb05 RG |
1121 | /* And adjust their location to point to the new position of the |
1122 | operand. */ | |
1123 | if (use0) | |
1124 | USE_OP_PTR (use0)->use = exp1; | |
1125 | if (use1) | |
1126 | USE_OP_PTR (use1)->use = exp0; | |
02075bb2 | 1127 | |
80560f95 AM |
1128 | /* Now swap the data. */ |
1129 | *exp0 = op1; | |
1130 | *exp1 = op0; | |
1131 | } | |
3c0b6c43 DB |
1132 | } |
1133 | ||
726a989a | 1134 | |
f430bae8 | 1135 | /* Scan the immediate_use list for VAR making sure its linked properly. |
65ad7c63 | 1136 | Return TRUE if there is a problem and emit an error message to F. */ |
f430bae8 | 1137 | |
DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  unsigned int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* LIST is the root node of VAR's circular doubly-linked list of
     immediate uses; the root itself carries no use (use == NULL).  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked root must be fully unlinked.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward, checking back-links and that each node really does
     use VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	{
	  fprintf (f, "prev != ptr->prev\n");
	  goto error;
	}

      if (ptr->use == NULL)
	{
	  fprintf (f, "ptr->use == NULL\n");
	  goto error; /* 2 roots, or SAFE guard node.  */
	}
      else if (*(ptr->use) != var)
	{
	  fprintf (f, "*(ptr->use) != var\n");
	  goto error;
	}

      prev = ptr;
      ptr = ptr->next;

      /* COUNT wrapping to zero after the increment means the list has
	 more uses than fit in an unsigned int.  */
      count++;
      if (count == 0)
	{
	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
	  goto error;
	}
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	{
	  fprintf (f, "prev != ptr->next\n");
	  goto error;
	}
      prev = ptr;
      ptr = ptr->prev;
      /* The backward walk must not see more nodes than the forward
	 walk counted.  */
      if (count == 0)
	{
	  fprintf (f, "count-- < 0\n");
	  goto error;
	}
      count--;
    }

  /* Both directions must agree on the number of nodes.  */
  if (count != 0)
    {
      fprintf (f, "count != 0\n");
      goto error;
    }

  return false;

 error:
  /* Dump the offending node, and its statement if it is pending an
     operand update.  */
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
1226 | ||
1227 | ||
1228 | /* Dump all the immediate uses to FILE. */ | |
1229 | ||
1230 | void | |
1231 | dump_immediate_uses_for (FILE *file, tree var) | |
1232 | { | |
1233 | imm_use_iterator iter; | |
1234 | use_operand_p use_p; | |
1235 | ||
1236 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
1237 | ||
1238 | print_generic_expr (file, var, TDF_SLIM); | |
1239 | fprintf (file, " : -->"); | |
1240 | if (has_zero_uses (var)) | |
1241 | fprintf (file, " no uses.\n"); | |
1242 | else | |
1243 | if (has_single_use (var)) | |
1244 | fprintf (file, " single use.\n"); | |
1245 | else | |
1246 | fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
1247 | ||
1248 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
1249 | { | |
726a989a | 1250 | if (use_p->loc.stmt == NULL && use_p->use == NULL) |
afd83fe4 | 1251 | fprintf (file, "***end of stmt iterator marker***\n"); |
f47c96aa | 1252 | else |
afd83fe4 | 1253 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
726a989a | 1254 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); |
afd83fe4 | 1255 | else |
726a989a | 1256 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); |
f430bae8 | 1257 | } |
c3284718 | 1258 | fprintf (file, "\n"); |
f430bae8 AM |
1259 | } |
1260 | ||
643519b7 | 1261 | |
f430bae8 AM |
1262 | /* Dump all the immediate uses to FILE. */ |
1263 | ||
1264 | void | |
1265 | dump_immediate_uses (FILE *file) | |
1266 | { | |
1267 | tree var; | |
1268 | unsigned int x; | |
1269 | ||
1270 | fprintf (file, "Immediate_uses: \n\n"); | |
46aa019a | 1271 | FOR_EACH_SSA_NAME (x, var, cfun) |
f430bae8 | 1272 | { |
f430bae8 AM |
1273 | dump_immediate_uses_for (file, var); |
1274 | } | |
1275 | } | |
1276 | ||
1277 | ||
1278 | /* Dump def-use edges on stderr. */ | |
1279 | ||
DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  /* Debugger convenience wrapper: dump all immediate uses to stderr.  */
  dump_immediate_uses (stderr);
}
1285 | ||
65ad7c63 | 1286 | |
f430bae8 AM |
1287 | /* Dump def-use edges on stderr. */ |
1288 | ||
DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  /* Debugger convenience wrapper: dump VAR's immediate uses to
     stderr.  */
  dump_immediate_uses_for (stderr, var);
}
cfaab3a9 DN |
1294 | |
1295 | ||
5006671f RG |
1296 | /* Unlink STMTs virtual definition from the IL by propagating its use. */ |
1297 | ||
void
unlink_stmt_vdef (gimple *stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* Nothing to do without an SSA-name virtual definition.  */
  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  /* Replace every use of STMT's VDEF with its VUSE, effectively
     bypassing STMT in the virtual def-use chain.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  /* The replacement name inherits the abnormal-PHI property.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
5006671f | 1320 | |
80560f95 AM |
1321 | /* Return true if the var whose chain of uses starts at PTR has a |
1322 | single nondebug use. Set USE_P and STMT to that single nondebug | |
1323 | use, if so, or to NULL otherwise. */ | |
1324 | bool | |
1325 | single_imm_use_1 (const ssa_use_operand_t *head, | |
355fe088 | 1326 | use_operand_p *use_p, gimple **stmt) |
80560f95 AM |
1327 | { |
1328 | ssa_use_operand_t *ptr, *single_use = 0; | |
1329 | ||
1330 | for (ptr = head->next; ptr != head; ptr = ptr->next) | |
f9ffade0 | 1331 | if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr))) |
80560f95 AM |
1332 | { |
1333 | if (single_use) | |
1334 | { | |
1335 | single_use = NULL; | |
1336 | break; | |
1337 | } | |
1338 | single_use = ptr; | |
1339 | } | |
1340 | ||
1341 | if (use_p) | |
1342 | *use_p = single_use; | |
1343 | ||
1344 | if (stmt) | |
1345 | *stmt = single_use ? single_use->loc.stmt : NULL; | |
1346 | ||
1347 | return single_use; | |
1348 | } | |
f9ffade0 | 1349 |