/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)

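/* For instance, going by the flag descriptions above, the target of an
   assignment is scanned with opf_is_def (plus opf_kill_def when the store
   kills the previous value), while subexpressions of an ADDR_EXPR are
   scanned with opf_no_vops so that no virtual operands are added for
   them.  */
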
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the SSA operands cache is active, i.e., between a call to
   init_ssa_operands and a call to fini_ssa_operands.  */
static bool ops_active = false;

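/* Chunked memory pool backing all operand structures; chunks are
   allocated on demand by ssa_operand_alloc and only released in
   fini_ssa_operands.  */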
static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);

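/* Per-type free lists of recycled operand nodes, threaded through
   their next pointers.  */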
static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;

/* Allocates operand OP of given TYPE from the appropriate free list,
   or from newly allocated memory if the free list is empty.  */

#define ALLOC_OPTYPE(OP, TYPE)				\
  do							\
    {							\
      TYPE##_optype_p ret = free_##TYPE##s;		\
      if (ret)						\
	free_##TYPE##s = ret->next;			\
      else						\
	ret = ssa_operand_alloc (sizeof (*ret));	\
      (OP) = ret;					\
    } while (0)

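/* For example, ALLOC_OPTYPE (new, def) pops the head of free_defs when
   one is available, and otherwise carves a fresh def_optype_d out of
   the operand memory pool via ssa_operand_alloc.  */
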
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}


/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}


/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return a chunk of memory of SIZE bytes for operand data.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}

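/* Note that operand nodes handed out by ssa_operand_alloc are never
   freed individually; stale nodes are returned to the per-type free
   lists and the underlying chunks are only released by
   fini_ssa_operands.  */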


/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

/* Appends ELT after TO, and moves the TO pointer to ELT.  */

#define APPEND_OP_AFTER(ELT, TO)	\
  do					\
    {					\
      (TO)->next = (ELT);		\
      (TO) = (ELT);			\
    } while (0)

/* Appends head of list FROM after TO, and move both pointers
   to their successors.  */

#define MOVE_HEAD_AFTER(FROM, TO)	\
  do					\
    {					\
      APPEND_OP_AFTER (FROM, TO);	\
      (FROM) = (FROM)->next;		\
    } while (0)

/* Moves OP to appropriate freelist.  OP is set to its successor.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)			\
  do							\
    {							\
      TYPE##_optype_p next = (OP)->next;		\
      (OP)->next = free_##TYPE##s;			\
      free_##TYPE##s = (OP);				\
      (OP) = next;					\
    } while (0)

/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)		\
  do							\
    {							\
      (USE_PTR)->use = (VAL);				\
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));	\
    } while (0)

/* Adds OP to the list of defs after LAST, and moves
   LAST to the new element.  */

static inline void
add_def_op (tree *op, def_optype_p *last)
{
  def_optype_p new;

  ALLOC_OPTYPE (new, def);
  DEF_OP_PTR (new) = op;
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of uses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
  use_optype_p new;

  ALLOC_OPTYPE (new, use);
  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
  vuse_optype_p new;

  ALLOC_OPTYPE (new, vuse);
  VUSE_OP (new) = op;
  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
  maydef_optype_p new;

  ALLOC_OPTYPE (new, maydef);
  MAYDEF_RESULT (new) = op;
  MAYDEF_OP (new) = op;
  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
{
  mustdef_optype_p new;

  ALLOC_OPTYPE (new, mustdef);
  MUSTDEF_RESULT (new) = op;
  MUSTDEF_KILL (new) = op;
  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

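/* The finalize_*_ops routines below merge a stmt's previous operand
   list with the freshly built vector: nodes whose operands match are
   reused in place, stale nodes are returned to the free lists, and
   genuinely new operands get newly allocated nodes.  */
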
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Given that def operands list is not necessarily sorted, merging
	   the operands this way does not make much sense.
	-- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  MOVE_HEAD_TO_FREELIST (old_ops, def);
	}
      else
	{
	  /* This is a new operand.  */
	  add_def_op (new_base, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = free_defs;
      free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}

/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = free_uses;
      free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}

/* Create the use operands for STMT from the build_uses vector.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}


/* Takes elements from build_v_may_defs and turns them into maydef operands
   of STMT.  */

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (MAYDEF_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, maydef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_maydef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_may_defs list, simply
     emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = free_maydefs;
      free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}

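/* Create the V_MAY_DEF operands for STMT from the build_v_may_defs
   vector.  */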
static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}


/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (VUSE_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (USE_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, vuse);
	}
      else
	{
	  /* This is a new operand.  */
	  add_vuse_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = free_vuses;
      free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Create the VUSE operands for STMT from the build_vuses vector, first
   pruning VUSEs that are made redundant by V_MAY_DEFs.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
	   vuse_index < VEC_length (tree, build_vuses);
	   vuse_index++)
	{
	  tree t = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (t) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (t);
	      ann->in_vuse_list = 0;
	    }
	}
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}

/* Takes elements from build_v_must_defs and turns them into mustdef operands
   of STMT.  */

static inline void
finalize_ssa_v_must_def_ops (tree stmt)
{
  unsigned new_i;
  struct mustdef_optype_d new_list;
  mustdef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MUSTDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
    {
      act = VEC_index (tree, build_v_must_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MUSTDEF_KILL (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_mustdef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_must_defs list, simply
     emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MUSTDEF_KILL_PTR (ptr));
      old_ops->next = free_mustdefs;
      free_mustdefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MUSTDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_must_defs));
  }
#endif
}

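/* Create the V_MUST_DEF operands for STMT from the build_v_must_defs
   vector, then empty the build vector.  */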
static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have num > 1,
     you have hit a bug.  */
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}


/* Finalize all the build vectors and fill the new operands into STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operands vectors; all the build_*
   vectors must be empty.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree) var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree) var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree) var);
}


1037 | /* REF is a tree that contains the entire pointer dereference |
1038 | expression, if available, or NULL otherwise. ALIAS is the variable | |
1039 | we are asking if REF can access. OFFSET and SIZE come from the | |
1040 | memory access expression that generated this virtual operand. | |
1041 | FOR_CLOBBER is true is this is adding a virtual operand for a call | |
1042 | clobber. */ | |
9390c347 | 1043 | |
02075bb2 DN |
1044 | static bool |
1045 | access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, | |
1046 | HOST_WIDE_INT size) | |
1047 | { | |
1048 | bool offsetgtz = offset > 0; | |
1049 | unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset; | |
1050 | tree base = ref ? get_base_address (ref) : NULL; | |
6de9cd9a | 1051 | |
02075bb2 DN |
1052 | /* If ALIAS is an SFT, it can't be touched if the offset |
1053 | and size of the access is not overlapping with the SFT offset and | |
1054 | size. This is only true if we are accessing through a pointer | |
1055 | to a type that is the same as SFT_PARENT_VAR. Otherwise, we may | |
1056 | be accessing through a pointer to some substruct of the | |
1057 | structure, and if we try to prune there, we will have the wrong | |
1058 | offset, and get the wrong answer. | |
1059 | i.e., we can't prune without more work if we have something like | |
6de9cd9a | 1060 | |
02075bb2 DN |
1061 | struct gcc_target |
1062 | { | |
1063 | struct asm_out | |
1064 | { | |
1065 | const char *byte_op; | |
1066 | struct asm_int_op | |
1067 | { | |
1068 | const char *hi; | |
1069 | } aligned_op; | |
1070 | } asm_out; | |
1071 | } targetm; | |
1072 | ||
1073 | foo = &targetm.asm_out.aligned_op; | |
1074 | return foo->hi; | |
6de9cd9a | 1075 | |
02075bb2 DN |
1076 | SFT.1, which represents hi, will have SFT_OFFSET=32 because in |
1077 | terms of SFT_PARENT_VAR, that is where it is. | |
1078 | However, the access through the foo pointer will be at offset 0. */ | |
1079 | if (size != -1 | |
1080 | && TREE_CODE (alias) == STRUCT_FIELD_TAG | |
1081 | && base | |
1082 | && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias)) | |
1083 | && !overlap_subvar (offset, size, alias, NULL)) | |
1084 | { | |
1085 | #ifdef ACCESS_DEBUGGING | |
1086 | fprintf (stderr, "Access to "); | |
1087 | print_generic_expr (stderr, ref, 0); | |
1088 | fprintf (stderr, " may not touch "); | |
1089 | print_generic_expr (stderr, alias, 0); | |
1090 | fprintf (stderr, " in function %s\n", get_name (current_function_decl)); | |
1091 | #endif | |
1092 | return false; | |
1093 | } | |
6de9cd9a | 1094 | |
02075bb2 DN |
1095 | /* Without strict aliasing, it is impossible for a component access |
1096 | through a pointer to touch a random variable, unless that | |
1097 | variable *is* a structure or a pointer. | |
6de9cd9a | 1098 | |
02075bb2 DN |
1099 | That is, given p->c, and some random global variable b, |
1100 | there is no legal way that p->c could be an access to b. | |
1101 | ||
1102 | Without strict aliasing on, we consider it legal to do something | |
1103 | like: | |
6de9cd9a | 1104 | |
02075bb2 DN |
1105 | struct foos { int l; }; |
1106 | int foo; | |
1107 | static struct foos *getfoo(void); | |
1108 | int main (void) | |
1109 | { | |
1110 | struct foos *f = getfoo(); | |
1111 | f->l = 1; | |
1112 | foo = 2; | |
1113 | if (f->l == 1) | |
1114 | abort(); | |
1115 | exit(0); | |
1116 | } | |
1117 | static struct foos *getfoo(void) | |
1118 | { return (struct foos *)&foo; } | |
1119 | ||
1120 | (taken from 20000623-1.c) | |
832a0c1d DB |
1121 | |
1122 | The docs also say/imply that access through union pointers | |
1123 | is legal (but *not* if you take the address of the union member, | |
1124 | i.e. the inverse), such that you can do | |
1125 | ||
1126 | typedef union { | |
1127 | int d; | |
1128 | } U; | |
1129 | ||
1130 | int rv; | |
1131 | void breakme() | |
1132 | { | |
1133 | U *rv0; | |
1134 | U *pretmp = (U*)&rv; | |
1135 | rv0 = pretmp; | |
1136 | rv0->d = 42; | |
1137 | } | |
1138 | To implement this, we just punt on accesses through union | |
1139 | pointers entirely. | |
02075bb2 DN |
1140 | */ |
1141 | else if (ref | |
1142 | && flag_strict_aliasing | |
1143 | && TREE_CODE (ref) != INDIRECT_REF | |
1144 | && !MTAG_P (alias) | |
832a0c1d DB |
1145 | && (TREE_CODE (base) != INDIRECT_REF |
1146 | || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE) | |
02075bb2 DN |
1147 | && !AGGREGATE_TYPE_P (TREE_TYPE (alias)) |
1148 | && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE | |
1149 | && !POINTER_TYPE_P (TREE_TYPE (alias))) | |
1150 | { | |
1151 | #ifdef ACCESS_DEBUGGING | |
1152 | fprintf (stderr, "Access to "); | |
1153 | print_generic_expr (stderr, ref, 0); | |
1154 | fprintf (stderr, " may not touch "); | |
1155 | print_generic_expr (stderr, alias, 0); | |
1156 | fprintf (stderr, " in function %s\n", get_name (current_function_decl)); | |
1157 | #endif | |
1158 | return false; | |
1159 | } | |
6de9cd9a | 1160 | |
02075bb2 DN |
1161 | /* If the offset of the access is greater than the size of one of |
1162 | the possible aliases, it can't be touching that alias, because it | |
1163 | would be past the end of the structure. */ | |
1164 | else if (ref | |
1165 | && flag_strict_aliasing | |
1166 | && TREE_CODE (ref) != INDIRECT_REF | |
1167 | && !MTAG_P (alias) | |
1168 | && !POINTER_TYPE_P (TREE_TYPE (alias)) | |
1169 | && offsetgtz | |
1170 | && DECL_SIZE (alias) | |
1171 | && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST | |
1172 | && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias))) | |
1173 | { | |
1174 | #ifdef ACCESS_DEBUGGING | |
1175 | fprintf (stderr, "Access to "); | |
1176 | print_generic_expr (stderr, ref, 0); | |
1177 | fprintf (stderr, " may not touch "); | |
1178 | print_generic_expr (stderr, alias, 0); | |
1179 | fprintf (stderr, " in function %s\n", get_name (current_function_decl)); | |
1180 | #endif | |
1181 | return false; | |
1182 | } | |
6de9cd9a | 1183 | |
02075bb2 | 1184 | return true; |
f430bae8 AM |
1185 | } |
1186 | ||
f430bae8 | 1187 | |
02075bb2 DN |
1188 | /* Add VAR to the virtual operands array. FLAGS is as in |
1189 | get_expr_operands. FULL_REF is a tree that contains the entire | |
1190 | pointer dereference expression, if available, or NULL otherwise. | |
1191 | OFFSET and SIZE come from the memory access expression that | |
1192 | generated this virtual operand. FOR_CLOBBER is true is this is | |
1193 | adding a virtual operand for a call clobber. */ | |
1194 | ||
1195 | static void | |
1196 | add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, | |
1197 | tree full_ref, HOST_WIDE_INT offset, | |
1198 | HOST_WIDE_INT size, bool for_clobber) | |
f430bae8 | 1199 | { |
02075bb2 DN |
1200 | VEC(tree,gc) *aliases; |
1201 | tree sym; | |
1202 | var_ann_t v_ann; | |
f430bae8 | 1203 | |
02075bb2 DN |
1204 | sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); |
1205 | v_ann = var_ann (sym); | |
1206 | ||
1207 | /* Mark statements with volatile operands. Optimizers should back | |
1208 | off from statements having volatile operands. */ | |
1209 | if (TREE_THIS_VOLATILE (sym) && s_ann) | |
1210 | s_ann->has_volatile_ops = true; | |
f430bae8 | 1211 | |
02075bb2 DN |
1212 | /* If the variable cannot be modified and this is a V_MAY_DEF change |
1213 | it into a VUSE. This happens when read-only variables are marked | |
1214 | call-clobbered and/or aliased to writable variables. So we only | |
1215 | check that this only happens on non-specific stores. | |
1a24f92f | 1216 | |
02075bb2 DN |
1217 | Note that if this is a specific store, i.e. associated with a |
1218 | modify_expr, then we can't suppress the V_MAY_DEF, lest we run | |
1219 | into validation problems. | |
1a24f92f | 1220 | |
02075bb2 DN |
1221 | This can happen when programs cast away const, leaving us with a |
1222 | store to read-only memory. If the statement is actually executed | |
1223 | at runtime, then the program is ill formed. If the statement is | |
1224 | not executed then all is well. At the very least, we cannot ICE. */ | |
1225 | if ((flags & opf_non_specific) && unmodifiable_var_p (var)) | |
1226 | flags &= ~(opf_is_def | opf_kill_def); | |
1227 | ||
1228 | /* The variable is not a GIMPLE register. Add it (or its aliases) to | |
1229 | virtual operands, unless the caller has specifically requested | |
1230 | not to add virtual operands (used when adding operands inside an | |
1231 | ADDR_EXPR expression). */ | |
1232 | if (flags & opf_no_vops) | |
f47c96aa | 1233 | return; |
02075bb2 DN |
1234 | |
1235 | aliases = v_ann->may_aliases; | |
1236 | if (aliases == NULL) | |
1237 | { | |
1238 | /* The variable is not aliased or it is an alias tag. */ | |
1239 | if (flags & opf_is_def) | |
1240 | { | |
1241 | if (flags & opf_kill_def) | |
1242 | { | |
1243 | /* V_MUST_DEF for non-aliased, non-GIMPLE register | |
1244 | variable definitions. */ | |
1245 | gcc_assert (!MTAG_P (var) | |
1246 | || TREE_CODE (var) == STRUCT_FIELD_TAG); | |
1247 | append_v_must_def (var); | |
1248 | } | |
1249 | else | |
1250 | { | |
1251 | /* Add a V_MAY_DEF for call-clobbered variables and | |
1252 | memory tags. */ | |
1253 | append_v_may_def (var); | |
1254 | } | |
1255 | } | |
1256 | else | |
1257 | append_vuse (var); | |
1258 | } | |
1259 | else | |
1260 | { | |
1261 | unsigned i; | |
1262 | tree al; | |
1263 | ||
1264 | /* The variable is aliased. Add its aliases to the virtual | |
1265 | operands. */ | |
1266 | gcc_assert (VEC_length (tree, aliases) != 0); | |
1267 | ||
1268 | if (flags & opf_is_def) | |
1269 | { | |
1270 | ||
1271 | bool none_added = true; | |
f47c96aa | 1272 | |
02075bb2 DN |
1273 | for (i = 0; VEC_iterate (tree, aliases, i, al); i++) |
1274 | { | |
1275 | if (!access_can_touch_variable (full_ref, al, offset, size)) | |
1276 | continue; | |
1277 | ||
1278 | none_added = false; | |
1279 | append_v_may_def (al); | |
1280 | } | |
f47c96aa | 1281 | |
02075bb2 DN |
1282 | /* If the variable is also an alias tag, add a virtual |
1283 | operand for it, otherwise we will miss representing | |
1284 | references to the members of the variable's alias set. | |
1285 | This fixes the bug in gcc.c-torture/execute/20020503-1.c. | |
1286 | ||
1287 | It is also necessary to add bare defs on clobbers for | |
18cd8a03 | 1288 | SMT's, so that bare SMT uses caused by pruning all the |
02075bb2 DN |
1289 | aliases will link up properly with calls. In order to |
1290 | keep the number of these bare defs we add down to the | |
18cd8a03 | 1291 | minimum necessary, we keep track of which SMT's were used |
65ad7c63 | 1292 | alone in statement vdefs or VUSEs. */ |
02075bb2 DN |
1293 | if (v_ann->is_aliased |
1294 | || none_added | |
18cd8a03 DN |
1295 | || (TREE_CODE (var) == SYMBOL_MEMORY_TAG |
1296 | && for_clobber | |
1297 | && SMT_USED_ALONE (var))) | |
02075bb2 | 1298 | { |
18cd8a03 | 1299 | /* Every bare SMT def we add should have SMT_USED_ALONE |
02075bb2 DN |
1300 | set on it, or else we will get the wrong answer on |
1301 | clobbers. */ | |
18cd8a03 DN |
1302 | if (none_added |
1303 | && !updating_used_alone && aliases_computed_p | |
1304 | && TREE_CODE (var) == SYMBOL_MEMORY_TAG) | |
1305 | gcc_assert (SMT_USED_ALONE (var)); | |
f47c96aa | 1306 | |
02075bb2 DN |
1307 | append_v_may_def (var); |
1308 | } | |
1309 | } | |
1310 | else | |
1311 | { | |
1312 | bool none_added = true; | |
1313 | for (i = 0; VEC_iterate (tree, aliases, i, al); i++) | |
1314 | { | |
1315 | if (!access_can_touch_variable (full_ref, al, offset, size)) | |
1316 | continue; | |
1317 | none_added = false; | |
1318 | append_vuse (al); | |
1319 | } | |
f47c96aa | 1320 | |
02075bb2 DN |
1321 | /* Similarly, append a virtual uses for VAR itself, when |
1322 | it is an alias tag. */ | |
1323 | if (v_ann->is_aliased || none_added) | |
1324 | append_vuse (var); | |
1325 | } | |
1326 | } | |
f47c96aa AM |
1327 | } |
1328 | ||
f47c96aa | 1329 | |
02075bb2 DN |
1330 | /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in |
1331 | get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to | |
1332 | the statement's real operands, otherwise it is added to virtual | |
1333 | operands. */ | |
1334 | ||
1335 | static void | |
1336 | add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) | |
f47c96aa | 1337 | { |
02075bb2 DN |
1338 | bool is_real_op; |
1339 | tree var, sym; | |
1340 | var_ann_t v_ann; | |
f47c96aa | 1341 | |
02075bb2 DN |
1342 | var = *var_p; |
1343 | gcc_assert (SSA_VAR_P (var)); | |
f47c96aa | 1344 | |
02075bb2 | 1345 | is_real_op = is_gimple_reg (var); |
f47c96aa | 1346 | |
02075bb2 DN |
1347 | /* If this is a real operand, the operand is either an SSA name or a |
1348 | decl. Virtual operands may only be decls. */ | |
1349 | gcc_assert (is_real_op || DECL_P (var)); | |
f47c96aa | 1350 | |
02075bb2 DN |
1351 | sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); |
1352 | v_ann = var_ann (sym); | |
f47c96aa | 1353 | |
02075bb2 DN |
1354 | /* Mark statements with volatile operands. Optimizers should back |
1355 | off from statements having volatile operands. */ | |
1356 | if (TREE_THIS_VOLATILE (sym) && s_ann) | |
1357 | s_ann->has_volatile_ops = true; | |
f47c96aa | 1358 | |
02075bb2 | 1359 | if (is_real_op) |
f47c96aa | 1360 | { |
02075bb2 DN |
1361 | /* The variable is a GIMPLE register. Add it to real operands. */ |
1362 | if (flags & opf_is_def) | |
1363 | append_def (var_p); | |
1364 | else | |
1365 | append_use (var_p); | |
f47c96aa | 1366 | } |
02075bb2 DN |
1367 | else |
1368 | add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false); | |
1369 | } | |
f47c96aa | 1370 | |
f47c96aa | 1371 | |
02075bb2 DN |
1372 | /* A subroutine of get_expr_operands to handle INDIRECT_REF, |
1373 | ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. | |
f47c96aa | 1374 | |
02075bb2 DN |
1375 | STMT is the statement being processed, EXPR is the INDIRECT_REF |
1376 | that got us here. | |
1377 | ||
1378 | FLAGS is as in get_expr_operands. | |
1a24f92f | 1379 | |
02075bb2 DN |
1380 | FULL_REF contains the full pointer dereference expression, if we |
1381 | have it, or NULL otherwise. | |
1a24f92f | 1382 | |
02075bb2 DN |
1383 | OFFSET and SIZE are the location of the access inside the |
1384 | dereferenced pointer, if known. | |
f47c96aa | 1385 | |
02075bb2 DN |
1386 | RECURSE_ON_BASE should be set to true if we want to continue |
1387 | calling get_expr_operands on the base pointer, and false if | |
1388 | something else will do it for us. */ | |
f47c96aa | 1389 | |
02075bb2 DN |
1390 | static void |
1391 | get_indirect_ref_operands (tree stmt, tree expr, int flags, | |
1392 | tree full_ref, | |
1393 | HOST_WIDE_INT offset, HOST_WIDE_INT size, | |
1394 | bool recurse_on_base) | |
1395 | { | |
1396 | tree *pptr = &TREE_OPERAND (expr, 0); | |
1397 | tree ptr = *pptr; | |
1398 | stmt_ann_t s_ann = stmt_ann (stmt); | |
f47c96aa | 1399 | |
02075bb2 DN |
1400 | /* Stores into INDIRECT_REF operands are never killing definitions. */ |
1401 | flags &= ~opf_kill_def; | |
f47c96aa | 1402 | |
02075bb2 | 1403 | if (SSA_VAR_P (ptr)) |
f47c96aa | 1404 | { |
02075bb2 DN |
1405 | struct ptr_info_def *pi = NULL; |
1406 | ||
1407 | /* If PTR has flow-sensitive points-to information, use it. */ | |
1408 | if (TREE_CODE (ptr) == SSA_NAME | |
1409 | && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL | |
1410 | && pi->name_mem_tag) | |
f47c96aa | 1411 | { |
02075bb2 DN |
1412 | /* PTR has its own memory tag. Use it. */ |
1413 | add_virtual_operand (pi->name_mem_tag, s_ann, flags, | |
1414 | full_ref, offset, size, false); | |
f47c96aa | 1415 | } |
02075bb2 | 1416 | else |
f47c96aa | 1417 | { |
02075bb2 | 1418 | /* If PTR is not an SSA_NAME or it doesn't have a name |
18cd8a03 | 1419 | tag, use its symbol memory tag. */ |
02075bb2 | 1420 | var_ann_t v_ann; |
f47c96aa | 1421 | |
02075bb2 DN |
1422 | /* If we are emitting debugging dumps, display a warning if |
1423 | PTR is an SSA_NAME with no flow-sensitive alias | |
1424 | information. That means that we may need to compute | |
1425 | aliasing again. */ | |
1426 | if (dump_file | |
1427 | && TREE_CODE (ptr) == SSA_NAME | |
1428 | && pi == NULL) | |
1429 | { | |
1430 | fprintf (dump_file, | |
1431 | "NOTE: no flow-sensitive alias info for "); | |
1432 | print_generic_expr (dump_file, ptr, dump_flags); | |
1433 | fprintf (dump_file, " in "); | |
1434 | print_generic_stmt (dump_file, stmt, dump_flags); | |
1435 | } | |
f430bae8 | 1436 | |
02075bb2 DN |
1437 | if (TREE_CODE (ptr) == SSA_NAME) |
1438 | ptr = SSA_NAME_VAR (ptr); | |
1439 | v_ann = var_ann (ptr); | |
f430bae8 | 1440 | |
18cd8a03 DN |
1441 | if (v_ann->symbol_mem_tag) |
1442 | add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags, | |
02075bb2 | 1443 | full_ref, offset, size, false); |
f430bae8 AM |
1444 | } |
1445 | } | |
02075bb2 DN |
1446 | else if (TREE_CODE (ptr) == INTEGER_CST) |
1447 | { | |
1448 | /* If a constant is used as a pointer, we can't generate a real | |
1449 | operand for it but we mark the statement volatile to prevent | |
1450 | optimizations from messing things up. */ | |
1451 | if (s_ann) | |
1452 | s_ann->has_volatile_ops = true; | |
1453 | return; | |
1454 | } | |
1455 | else | |
1456 | { | |
1457 | /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */ | |
1458 | gcc_unreachable (); | |
1459 | } | |
f430bae8 | 1460 | |
02075bb2 DN |
1461 | /* If requested, add a USE operand for the base pointer. */ |
1462 | if (recurse_on_base) | |
1463 | get_expr_operands (stmt, pptr, opf_none); | |
f430bae8 AM |
1464 | } |
1465 | ||
643519b7 | 1466 | |
02075bb2 | 1467 | /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ |

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	{
	  int subvar_flags = flags;
	  if (!exact || size != maxsize)
	    subvar_flags &= ~opf_kill_def;
	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
	}
    }
}


/* Add clobbering definitions for .GLOBAL_VAR or for each of the
   call-clobbered variables in the function.  */
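/* (For instance, illustrative: for a call foo () seen after aliasing
   has been computed, the statement either receives a single
   V_MAY_DEF of .GLOBAL_VAR, or one virtual operand per call-clobbered
   variable, pruned below using the IPA reference information.)  */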

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are neither const nor pure, and that may return,
     can clobber call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a V_MAY_DEF operand for every call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
					    DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
						  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_stmt_operand (&var, s_ann, opf_none);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_stmt_operand (&var, s_ann, opf_none);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
    }
}


/* Add VUSE operands for .GLOBAL_VAR or all call-clobbered variables in
   the function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}


/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

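  /* The scans below follow the CALL_EXPR layout: operand 0 is the
     function expression, operand 1 the TREE_LIST of arguments, and
     operand 2 the static chain, if any.  */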
  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}


/* Scan operands in the ASM_EXPR stmt STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

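  /* (Illustrative note: an output that is only allowed in memory, as
     in asm ("" : "=m" (x)), takes X's address above and is then
     scanned as a definition, so the store ends up represented with
     virtual operands rather than a real register definition.)  */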
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}


/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_expr_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will either need
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
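  /* A sketch of the expected behavior: with X addressable, "x = 5"
     yields a killing V_MUST_DEF of X; "s.f = 5" kills only the subvar
     for S.F when the reference matches it exactly; and "*p_1 = 5"
     yields just a preserving V_MAY_DEF of P_1's memory tag.  */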
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
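      /* E.g., in t_1 = &a[i_2], i_2 is added as a real USE, while A
	 itself generates no virtual operands.  */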
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable, if it has subvars, to DEFS
	   or USES.  Otherwise, add the variable itself.  Whether it
	   goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	bool none = true;

	/* This component reference becomes an access to all of the
	   subvariables it can touch, if we can determine that, but
	   *NOT* the real one.  If we can't determine which fields we
	   could touch, the recursion will eventually get to a
	   variable and add *all* of its subvars, or whatever is the
	   minimum correct subset.  */
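	/* For instance (illustrative): for a structure S with subvars
	   SFT.1 for S.A and SFT.2 for S.B, a reference to s.a adds
	   only SFT.1, the subvar overlapping the accessed range.  */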
	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t sv;
	    subvar_t svars = get_subvars_for_var (ref);

	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;

		if (overlap_subvar (offset, maxsize, sv->var, &exact))
		  {
		    int subvar_flags = flags;
		    none = false;
		    if (!exact || size != maxsize)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }

	    if (!none)
	      flags |= opf_no_vops;
	  }
	else if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
				       maxsize, false);
	    flags |= opf_no_vops;
	  }

	/* Even if we found subvars above, we still need to see the
	   immediate uses of D in something like S.A[D]; if S.A has
	   subvars, we would miss D otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
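	/* E.g., for a vector assignment v_1 = {a_2, b_3}, each element
	   value below is scanned as a plain USE.  */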
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case BIT_FIELD_REF:
      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
      flags &= ~opf_kill_def;

      /* Fallthru  */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will fail in add_stmt_operand.  This default will handle
	 statements like empty statements, or CALL_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}


/* Create an operands cache for STMT.  */
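/* (Note: the cache is built in three steps, visible below: the
   statement is parsed into the build_* vectors, the virtual operand
   vectors are sorted into a canonical order, and the result is
   committed to the statement by the finalize routines.)  */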

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}


/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}


/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}


/* Specifically for use in DOM's expression analysis.  Given a store,
   we create an artificial stmt which looks like a load from the store;
   this can be used to eliminate redundant loads.  OLD_STMT is the
   store stmt, and NEW_STMT is the new load which represents a load of
   the values stored.  */
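/* (For instance, illustrative: for a store a = b_1 + c_2, the
   artificial load is a statement reading A whose VUSEs are the
   V_MAY_DEF/V_MUST_DEF results of the store, so a later load of A can
   be recognized as redundant with the stored value.)  */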

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */
02075bb2 DN |
2331 | void |
2332 | swap_tree_operands (tree stmt, tree *exp0, tree *exp1) | |
2333 | { | |
2334 | tree op0, op1; | |
2335 | op0 = *exp0; | |
2336 | op1 = *exp1; | |
3c0b6c43 | 2337 | |
65ad7c63 DN |
2338 | /* If the operand cache is active, attempt to preserve the relative |
2339 | positions of these two operands in their respective immediate use | |
2340 | lists. */ | |
02075bb2 DN |
2341 | if (ssa_operands_active () && op0 != op1) |
2342 | { | |
2343 | use_optype_p use0, use1, ptr; | |
2344 | use0 = use1 = NULL; | |
3c0b6c43 | 2345 | |
02075bb2 DN |
2346 | /* Find the 2 operands in the cache, if they are there. */ |
2347 | for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) | |
2348 | if (USE_OP_PTR (ptr)->use == exp0) | |
2349 | { | |
2350 | use0 = ptr; | |
2351 | break; | |
2352 | } | |
3c0b6c43 | 2353 | |
02075bb2 DN |
2354 | for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) |
2355 | if (USE_OP_PTR (ptr)->use == exp1) | |
2356 | { | |
2357 | use1 = ptr; | |
2358 | break; | |
2359 | } | |
2360 | ||
2361 | /* If both uses don't have operand entries, there isn't much we can do | |
65ad7c63 | 2362 | at this point. Presumably we don't need to worry about it. */ |
02075bb2 DN |
2363 | if (use0 && use1) |
2364 | { | |
2365 | tree *tmp = USE_OP_PTR (use1)->use; | |
2366 | USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use; | |
2367 | USE_OP_PTR (use0)->use = tmp; | |
2368 | } | |
3c0b6c43 | 2369 | } |
02075bb2 DN |
2370 | |
2371 | /* Now swap the data. */ | |
2372 | *exp0 = op1; | |
2373 | *exp1 = op0; | |
3c0b6c43 DB |
2374 | } |
2375 | ||
643519b7 | 2376 | |
e8ca4159 DN |
2377 | /* Add the base address of REF to the set *ADDRESSES_TAKEN. If |
2378 | *ADDRESSES_TAKEN is NULL, a new set is created. REF may be | |
2379 | a single variable whose address has been taken or any other valid | |
2380 | GIMPLE memory reference (structure reference, array, etc). If the | |
2381 | base address of REF is a decl that has sub-variables, also add all | |
2382 | of its sub-variables. */ | |
6de9cd9a | 2383 | |
e8ca4159 DN |
2384 | void |
2385 | add_to_addressable_set (tree ref, bitmap *addresses_taken) | |
6de9cd9a | 2386 | { |
e8ca4159 | 2387 | tree var; |
c75ab022 | 2388 | subvar_t svars; |
c75ab022 | 2389 | |
e8ca4159 DN |
2390 | gcc_assert (addresses_taken); |
2391 | ||
23e66a36 | 2392 | /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF |
e8ca4159 DN |
2393 | as the only thing we take the address of. If VAR is a structure, |
2394 | taking the address of a field means that the whole structure may | |
2395 | be referenced using pointer arithmetic. See PR 21407 and the | |
2396 | ensuing mailing list discussion. */ | |
2397 | var = get_base_address (ref); | |
6de9cd9a DN |
2398 | if (var && SSA_VAR_P (var)) |
2399 | { | |
e8ca4159 DN |
2400 | if (*addresses_taken == NULL) |
2401 | *addresses_taken = BITMAP_GGC_ALLOC (); | |
c75ab022 | 2402 | |
c75ab022 DB |
2403 | if (var_can_have_subvars (var) |
2404 | && (svars = get_subvars_for_var (var))) | |
2405 | { | |
2406 | subvar_t sv; | |
2407 | for (sv = svars; sv; sv = sv->next) | |
e8ca4159 DN |
2408 | { |
2409 | bitmap_set_bit (*addresses_taken, DECL_UID (sv->var)); | |
2410 | TREE_ADDRESSABLE (sv->var) = 1; | |
2411 | } | |
c75ab022 | 2412 | } |
9044951e | 2413 | else |
e8ca4159 DN |
2414 | { |
2415 | bitmap_set_bit (*addresses_taken, DECL_UID (var)); | |
2416 | TREE_ADDRESSABLE (var) = 1; | |
2417 | } | |
6de9cd9a DN |
2418 | } |
2419 | } | |
2420 | ||

/* Scan the immediate_use list for VAR, making sure it is linked
   properly.  Return TRUE if there is a problem and emit an error
   message to F.  */
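/* (Note: the immediate-use list is a circular doubly-linked list
   whose root node is SSA_NAME_IMM_USE_NODE (VAR); the root is the
   only node with a NULL use pointer, which the checks below rely
   on.)  */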

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges on stderr for VAR.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"