/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_stmt_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */

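/* As an illustrative sketch of that lifecycle (not part of the
   implementation; "lhsp" and "rhsp" are hypothetical stand-ins for the
   operand pointers of a statement such as "a = b + c"):

     start_ssa_stmt_operands ();                  <- build_* arrays empty
     get_expr_operands (stmt, lhsp, opf_def);     <- appends 'a' to build_defs
     get_expr_operands (stmt, rhsp, opf_use);     <- appends 'b', 'c' to build_uses
     finalize_ssa_stmt_operands (stmt);           <- commits vectors, resets them

   This mirrors what build_ssa_operands and parse_ssa_operands below
   actually do.  */
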
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

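/* As an example of how these flags combine (taken from
   get_indirect_ref_operands below), the base pointer of a memory
   reference is scanned with

     get_expr_operands (stmt, pptr,
                        opf_non_addressable | opf_use
                        | (flags & (opf_no_vops | opf_not_non_addressable)));

   i.e. the pointer itself is a plain use, taking its address here does
   not make it addressable, and the no-vops and not-non-addressable bits
   are inherited from the caller.  */
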
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes); return false in that case for now.  FIXME: operands may
     be active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands' FUD chain.  */

static void
create_vop_var (void)
{
  tree global_var;

  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}

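/* In GIMPLE dumps this variable shows up as the ".MEM" name in virtual
   operands, e.g. (a typical dump line, not produced by this file):

     # .MEM_2 = VDEF <.MEM_1>
     *p_3 = x_4;

   where .MEM_1 is the VUSE and .MEM_2 the VDEF of the store.  */
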
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, which is enough for 10 statements each with 1 def
   and 2 uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))

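/* To check the numbers above (a back-of-the-envelope sketch assuming a
   64-bit host where struct def_optype_d is two pointers = 16 bytes and
   struct use_optype_d is five pointers = 40 bytes):

     (1024 - 8) / 16 = 63 def operands per 1k chunk
     (1024 - 8) / 40 = 25 use operands per 1k chunk

   The exact struct sizes depend on the host ABI.  */
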
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
              || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
                        + gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
          ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


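/* link_imm_use_stmt above threads the new use onto the circular,
   doubly-linked immediate-use list rooted at the SSA name.  As a sketch
   (assuming uses u1 and u2 of the name 'x_1'):

     SSA_NAME_IMM_USE_NODE (x_1) <-> u1 <-> u2 <-> back to the root node

   verify_imm_links at the end of this file walks exactly this ring in
   both directions.  */
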
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Prepend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Prepend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the built VDEF/VUSE and empty the build arrays for defs and
   uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}


/* Finalize all the build vectors and fill the new ones into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up the operand vectors for a statement.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}

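/* For example, for a statement "x_1 = *p_2" (a sketch): 'x_1' and 'p_2'
   are GIMPLE registers and end up in the real def and use lists, while
   the load through 'p_2' is handled by add_virtual_operand and appears
   as a VUSE of .MEM.  */
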
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
        TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
               && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
               && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
        TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}


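/* For instance, given "p_1 = &s.f" the base address is 's', so the
   whole of 's' becomes TREE_ADDRESSABLE, not just the field 'f'; 's'
   may afterwards be reached through 'p_1' with pointer arithmetic.  */
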
/* A subroutine of get_expr_operands to handle MEM_REF and
   MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
                       opf_non_addressable | opf_use
                       | (flags & (opf_no_vops|opf_not_non_addressable)));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));

  if (TMR_SYMBOL (expr))
    mark_address_taken (TREE_OPERAND (TMR_SYMBOL (expr), 0));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
                         opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
        {
          add_virtual_operand (stmt, opf_def);
          break;
        }
    }
}


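/* For example (a sketch, not output of this file), for

     __asm__ __volatile__ ("" : "=m" (x) : "m" (y) : "memory");

   'x' is marked addressable and scanned with opf_def, 'y' is marked
   addressable and scanned as a use, and the "memory" clobber adds a
   VDEF of .MEM.  */
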
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is a bitmask of OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if ((!(flags & opf_non_addressable)
           || (flags & opf_not_non_addressable))
          && !is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                         flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASM)
    get_asm_expr_operands (stmt);
  else if (is_gimple_debug (stmt))
    {
      if (gimple_debug_bind_p (stmt)
          && gimple_debug_bind_has_value_p (stmt))
        get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
                           opf_use | opf_no_vops);
    }
  else
    {
      size_t i, start = 0;

      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
        {
          get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
          start = 1;
        }

      for (i = start; i < gimple_num_ops (stmt); i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

      /* Add call-clobbered operands, if needed.  */
      if (code == GIMPLE_CALL)
        maybe_add_call_vops (stmt);
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
        continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      /* The loop below stops one node short of the end; delink that
         final use as well before moving the list to the free list.  */
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}


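/* Passes normally do not call this directly; after modifying a
   statement in place they call update_stmt (defined in the tree-flow
   headers), which sets the modified flag and then comes here.  A
   typical sketch:

     gimple_assign_set_rhs1 (stmt, new_op);
     update_stmt (stmt);	<- rebuilds the operand cache shown above
*/
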
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* Unless both uses have operand entries, there isn't much we can
         do at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}


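/* A typical caller (a sketch): canonicalization code that wants to swap
   the two sides of a commutative operation without invalidating the
   operand cache does

     swap_tree_operands (stmt,
                         gimple_assign_rhs1_ptr (stmt),
                         gimple_assign_rhs2_ptr (stmt));

   rather than swapping the trees by hand and rebuilding the operands.  */
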
/* Scan the immediate_use list for VAR, making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error;	/* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its VUSE
   to all uses of the VDEF.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, gimple_vuse (stmt));
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}

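/* For example (a sketch): when a pass deletes the dead store

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_2;

   it calls unlink_stmt_vdef first, so every later "VUSE <.MEM_3>"
   becomes "VUSE <.MEM_2>" and the virtual use-def chain stays intact
   once the statement is removed.  */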