1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 4 of these routines, each representing one of the
57 4 types of operands: Defs, Uses, Virtual Uses (VUSEs), and Virtual Defs (VDEFs).
58
59 The append_* routines check for duplication, and simply keep a list of
60 unique objects for each operand type in the build_* extendable vectors.
61
62 Once the stmt tree is completely parsed, the finalize_ssa_operands()
63 routine is called, which finalizes each of the operand vectors
64 that have been built up.
65
66 If the stmt had a previous operand cache, the finalization routines
67 attempt to match up the new operands with the old ones. If it's a perfect
68 match, the old vector is simply reused. If it isn't a perfect match, then
69 a new vector is created and the new operands are placed there. For
70 virtual operands, if the previous cache had an SSA_NAME version of a
71 variable, and that same variable occurs in the new operands cache, then
72 the new cache vector will also get the same SSA_NAME.
73
74 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
75 operand vector for VUSE, then the new vector will also be modified
76 such that it contains 'a_5' rather than 'a'. */
77
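/* As an illustrative sketch only (not part of this implementation):
   passes normally consume this cache through the SSA operand iterators
   rather than by touching the build vectors or operand lists directly.
   Assuming the standard iterator API and a hypothetical helper
   process_use (), the usual pattern is

     tree op;
     ssa_op_iter iter;

     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
       process_use (op);

     update_stmt (stmt);

   where update_stmt () forces this cache to be rebuilt after the
   statement has been modified in place.  */
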
78 /* Structure storing statistics on how many call clobbers we have, and
79 how many were avoided. */
80
81 static struct
82 {
83 /* Number of call-clobbered ops we attempt to add to calls in
84 add_call_clobbered_mem_symbols. */
85 unsigned int clobbered_vars;
86
87 /* Number of write-clobbers (VDEFs) avoided by using
88 not_written information. */
89 unsigned int static_write_clobbers_avoided;
90
91 /* Number of reads (VUSEs) avoided by using not_read information. */
92 unsigned int static_read_clobbers_avoided;
93
94 /* Number of write-clobbers avoided because the variable can't escape to
95 this call. */
96 unsigned int unescapable_clobbers_avoided;
97
98 /* Number of read-only uses we attempt to add to calls in
99 add_call_read_mem_symbols. */
100 unsigned int readonly_clobbers;
101
102 /* Number of read-only uses we avoid using not_read information. */
103 unsigned int static_readonly_clobbers_avoided;
104 } clobber_stats;
105
106
107 /* Flags to describe operand properties in helpers. */
108
109 /* By default, operands are loaded. */
110 #define opf_use 0
111
112 /* Operand is the target of an assignment expression or a
113 call-clobbered variable. */
114 #define opf_def (1 << 0)
115
116 /* No virtual operands should be created in the expression. This is used
117 when traversing ADDR_EXPR nodes which have different semantics than
118 other expressions. Inside an ADDR_EXPR node, the only operands that we
119 need to consider are indices into arrays. For instance, &a.b[i] should
120 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
121 VUSE for 'b'. */
122 #define opf_no_vops (1 << 1)
123
124 /* Operand is an implicit reference. This is used to distinguish
125 explicit assignments in the form of MODIFY_EXPR from
126 clobbering sites like function calls or ASM_EXPRs. */
127 #define opf_implicit (1 << 2)
128
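/* A minimal usage sketch (mirroring parse_ssa_operands below): the LHS
   of an assignment is scanned as a definition and the remaining operands
   as uses, e.g.

     get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
     get_expr_operands (stmt, gimple_op_ptr (stmt, 1), opf_use);

   while opf_no_vops is OR'ed into the flags when scanning the operands
   underneath an ADDR_EXPR.  */
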
129 /* Array for building all the def operands. */
130 static VEC(tree,heap) *build_defs;
131
132 /* Array for building all the use operands. */
133 static VEC(tree,heap) *build_uses;
134
135 /* The built VDEF operand. */
136 static tree build_vdef;
137
138 /* The built VUSE operand. */
139 static tree build_vuse;
140
141 /* Bitmap obstack for our data structures that need to survive across
142 compilations of multiple functions. */
143 static bitmap_obstack operands_bitmap_obstack;
144
145 static void get_expr_operands (gimple, tree *, int);
146
147 /* Number of functions with initialized ssa_operands. */
148 static int n_initialized = 0;
149
150 /* Return the DECL_UID of the base variable of T. */
151
152 static inline unsigned
153 get_name_decl (const_tree t)
154 {
155 if (TREE_CODE (t) != SSA_NAME)
156 return DECL_UID (t);
157 else
158 return DECL_UID (SSA_NAME_VAR (t));
159 }
160
161
162 /* Return true if the SSA operands cache is active. */
163
164 bool
165 ssa_operands_active (void)
166 {
167 /* This function may be invoked from contexts where CFUN is NULL
168 (IPA passes); return false for now. FIXME: operands may be
169 active in each individual function, maybe this function should
170 take CFUN as a parameter. */
171 if (cfun == NULL)
172 return false;
173
174 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
175 }
176
177
178 /* Create the VOP variable, an artificial global variable to act as a
179 representative of the FUD chain of all virtual operands. */
180
181 static void
182 create_vop_var (void)
183 {
184 tree global_var;
185
186 gcc_assert (cfun->gimple_df->vop == NULL_TREE);
187
188 global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
189 get_identifier (".MEM"),
190 void_type_node);
191 DECL_ARTIFICIAL (global_var) = 1;
192 TREE_READONLY (global_var) = 0;
193 DECL_EXTERNAL (global_var) = 1;
194 TREE_STATIC (global_var) = 1;
195 TREE_USED (global_var) = 1;
196 DECL_CONTEXT (global_var) = NULL_TREE;
197 TREE_THIS_VOLATILE (global_var) = 0;
198 TREE_ADDRESSABLE (global_var) = 0;
199
200 create_var_ann (global_var);
201 add_referenced_var (global_var);
202 cfun->gimple_df->vop = global_var;
203 }
204
205 /* These are the sizes in bytes of the operand memory buffers that get
206 allocated each time more operand space is required. The final value is
207 the amount that is allocated every time after that.
208 In 1k we can fit 25 use operands (or 63 def operands) on a host with
209 8 byte pointers, which would be enough for 10 statements each with
210 1 def and 2 uses. */
211
212 #define OP_SIZE_INIT 0
213 #define OP_SIZE_1 (1024 - sizeof (void *))
214 #define OP_SIZE_2 (1024 * 4 - sizeof (void *))
215 #define OP_SIZE_3 (1024 * 16 - sizeof (void *))
216
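/* A rough size check of the comment above (a sketch, assuming a 64-bit
   host where struct def_optype_d is two pointers and struct use_optype_d
   is five pointers):

     OP_SIZE_1 == 1016 bytes
     1016 / 40 == 25 use operands,  1016 / 16 == 63 def operands.  */
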
217 /* Initialize the operand cache routines. */
218
219 void
220 init_ssa_operands (void)
221 {
222 if (!n_initialized++)
223 {
224 build_defs = VEC_alloc (tree, heap, 5);
225 build_uses = VEC_alloc (tree, heap, 10);
226 build_vuse = NULL_TREE;
227 build_vdef = NULL_TREE;
228 bitmap_obstack_initialize (&operands_bitmap_obstack);
229 }
230
231 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
232 gimple_ssa_operands (cfun)->operand_memory_index
233 = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
234 gimple_ssa_operands (cfun)->ops_active = true;
235 memset (&clobber_stats, 0, sizeof (clobber_stats));
236 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
237 create_vop_var ();
238 }
239
240
241 /* Dispose of anything required by the operand routines. */
242
243 void
244 fini_ssa_operands (void)
245 {
246 struct ssa_operand_memory_d *ptr;
247
248 if (!--n_initialized)
249 {
250 VEC_free (tree, heap, build_defs);
251 VEC_free (tree, heap, build_uses);
252 build_vdef = NULL_TREE;
253 build_vuse = NULL_TREE;
254 }
255
256 gimple_ssa_operands (cfun)->free_defs = NULL;
257 gimple_ssa_operands (cfun)->free_uses = NULL;
258
259 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
260 {
261 gimple_ssa_operands (cfun)->operand_memory
262 = gimple_ssa_operands (cfun)->operand_memory->next;
263 ggc_free (ptr);
264 }
265
266 gimple_ssa_operands (cfun)->ops_active = false;
267
268 if (!n_initialized)
269 bitmap_obstack_release (&operands_bitmap_obstack);
270
271 cfun->gimple_df->vop = NULL_TREE;
272
273 if (dump_file && (dump_flags & TDF_STATS))
274 {
275 fprintf (dump_file, "Original clobbered vars: %d\n",
276 clobber_stats.clobbered_vars);
277 fprintf (dump_file, "Static write clobbers avoided: %d\n",
278 clobber_stats.static_write_clobbers_avoided);
279 fprintf (dump_file, "Static read clobbers avoided: %d\n",
280 clobber_stats.static_read_clobbers_avoided);
281 fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
282 clobber_stats.unescapable_clobbers_avoided);
283 fprintf (dump_file, "Original read-only clobbers: %d\n",
284 clobber_stats.readonly_clobbers);
285 fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
286 clobber_stats.static_readonly_clobbers_avoided);
287 }
288 }
289
290
291 /* Return memory for an operand of size SIZE. */
292
293 static inline void *
294 ssa_operand_alloc (unsigned size)
295 {
296 char *ptr;
297
298 gcc_assert (size == sizeof (struct use_optype_d)
299 || size == sizeof (struct def_optype_d));
300
301 if (gimple_ssa_operands (cfun)->operand_memory_index + size
302 >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
303 {
304 struct ssa_operand_memory_d *ptr;
305
306 switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
307 {
308 case OP_SIZE_INIT:
309 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
310 break;
311 case OP_SIZE_1:
312 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
313 break;
314 case OP_SIZE_2:
315 case OP_SIZE_3:
316 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
317 break;
318 default:
319 gcc_unreachable ();
320 }
321
322 ptr = (struct ssa_operand_memory_d *)
323 ggc_alloc (sizeof (void *)
324 + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
325 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
326 gimple_ssa_operands (cfun)->operand_memory = ptr;
327 gimple_ssa_operands (cfun)->operand_memory_index = 0;
328 }
329
330 ptr = &(gimple_ssa_operands (cfun)->operand_memory
331 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
332 gimple_ssa_operands (cfun)->operand_memory_index += size;
333 return ptr;
334 }
335
336
337 /* Allocate a DEF operand. */
338
339 static inline struct def_optype_d *
340 alloc_def (void)
341 {
342 struct def_optype_d *ret;
343 if (gimple_ssa_operands (cfun)->free_defs)
344 {
345 ret = gimple_ssa_operands (cfun)->free_defs;
346 gimple_ssa_operands (cfun)->free_defs
347 = gimple_ssa_operands (cfun)->free_defs->next;
348 }
349 else
350 ret = (struct def_optype_d *)
351 ssa_operand_alloc (sizeof (struct def_optype_d));
352 return ret;
353 }
354
355
356 /* Allocate a USE operand. */
357
358 static inline struct use_optype_d *
359 alloc_use (void)
360 {
361 struct use_optype_d *ret;
362 if (gimple_ssa_operands (cfun)->free_uses)
363 {
364 ret = gimple_ssa_operands (cfun)->free_uses;
365 gimple_ssa_operands (cfun)->free_uses
366 = gimple_ssa_operands (cfun)->free_uses->next;
367 }
368 else
369 ret = (struct use_optype_d *)
370 ssa_operand_alloc (sizeof (struct use_optype_d));
371 return ret;
372 }
373
374
375 /* Adds OP to the list of defs after LAST. */
376
377 static inline def_optype_p
378 add_def_op (tree *op, def_optype_p last)
379 {
380 def_optype_p new_def;
381
382 new_def = alloc_def ();
383 DEF_OP_PTR (new_def) = op;
384 last->next = new_def;
385 new_def->next = NULL;
386 return new_def;
387 }
388
389
390 /* Adds OP to the list of uses of statement STMT after LAST. */
391
392 static inline use_optype_p
393 add_use_op (gimple stmt, tree *op, use_optype_p last)
394 {
395 use_optype_p new_use;
396
397 new_use = alloc_use ();
398 USE_OP_PTR (new_use)->use = op;
399 link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
400 last->next = new_use;
401 new_use->next = NULL;
402 return new_use;
403 }
404
405
406
407 /* Takes elements from build_defs and turns them into def operands of STMT.
408 TODO -- Make build_defs VEC of tree *. */
409
410 static inline void
411 finalize_ssa_defs (gimple stmt)
412 {
413 unsigned new_i;
414 struct def_optype_d new_list;
415 def_optype_p old_ops, last;
416 unsigned int num = VEC_length (tree, build_defs);
417
418 /* There should only be a single real definition per assignment. */
419 gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);
420
421 /* Pre-pend the vdef we may have built. */
422 if (build_vdef != NULL_TREE)
423 {
424 tree oldvdef = gimple_vdef (stmt);
425 if (oldvdef
426 && TREE_CODE (oldvdef) == SSA_NAME)
427 oldvdef = SSA_NAME_VAR (oldvdef);
428 if (oldvdef != build_vdef)
429 gimple_set_vdef (stmt, build_vdef);
430 VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
431 ++num;
432 }
433
434 new_list.next = NULL;
435 last = &new_list;
436
437 old_ops = gimple_def_ops (stmt);
438
439 new_i = 0;
440
441 /* Clear and unlink a no longer necessary VDEF. */
442 if (build_vdef == NULL_TREE
443 && gimple_vdef (stmt) != NULL_TREE)
444 {
445 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
446 {
447 unlink_stmt_vdef (stmt);
448 release_ssa_name (gimple_vdef (stmt));
449 }
450 gimple_set_vdef (stmt, NULL_TREE);
451 }
452
453 /* If we have a non-SSA_NAME VDEF, mark it for renaming. */
454 if (gimple_vdef (stmt)
455 && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
456 mark_sym_for_renaming (gimple_vdef (stmt));
457
458 /* Check for the common case of 1 def that hasn't changed. */
459 if (old_ops && old_ops->next == NULL && num == 1
460 && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
461 return;
462
463 /* If there is anything in the old list, free it. */
464 if (old_ops)
465 {
466 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
467 gimple_ssa_operands (cfun)->free_defs = old_ops;
468 }
469
470 /* If there is anything remaining in the build_defs list, simply emit it. */
471 for ( ; new_i < num; new_i++)
472 last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
473
474 /* Now set the stmt's operands. */
475 gimple_set_def_ops (stmt, new_list.next);
476 }
477
478
479 /* Takes elements from build_uses and turns them into use operands of STMT.
480 TODO -- Make build_uses VEC of tree *. */
481
482 static inline void
483 finalize_ssa_uses (gimple stmt)
484 {
485 unsigned new_i;
486 struct use_optype_d new_list;
487 use_optype_p old_ops, ptr, last;
488
489 /* Pre-pend the VUSE we may have built. */
490 if (build_vuse != NULL_TREE)
491 {
492 tree oldvuse = gimple_vuse (stmt);
493 if (oldvuse
494 && TREE_CODE (oldvuse) == SSA_NAME)
495 oldvuse = SSA_NAME_VAR (oldvuse);
496 if (oldvuse != (build_vuse != NULL_TREE
497 ? build_vuse : build_vdef))
498 gimple_set_vuse (stmt, NULL_TREE);
499 VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
500 }
501
502 new_list.next = NULL;
503 last = &new_list;
504
505 old_ops = gimple_use_ops (stmt);
506
507 /* Clear a no longer necessary VUSE. */
508 if (build_vuse == NULL_TREE
509 && gimple_vuse (stmt) != NULL_TREE)
510 gimple_set_vuse (stmt, NULL_TREE);
511
512 /* If there is anything in the old list, free it. */
513 if (old_ops)
514 {
515 for (ptr = old_ops; ptr; ptr = ptr->next)
516 delink_imm_use (USE_OP_PTR (ptr));
517 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
518 gimple_ssa_operands (cfun)->free_uses = old_ops;
519 }
520
521 /* If we added a VUSE, make sure to set the operand if it is not already
522 present and mark it for renaming. */
523 if (build_vuse != NULL_TREE
524 && gimple_vuse (stmt) == NULL_TREE)
525 {
526 gimple_set_vuse (stmt, gimple_vop (cfun));
527 mark_sym_for_renaming (gimple_vop (cfun));
528 }
529
530 /* Now create use operand nodes for all the new uses. */
531 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
532 last = add_use_op (stmt,
533 (tree *) VEC_index (tree, build_uses, new_i),
534 last);
535
536 /* Now set the stmt's operands. */
537 gimple_set_use_ops (stmt, new_list.next);
538 }
539
540
541 /* Clear the built VDEF and VUSE operands and empty the build
542 arrays for defs and uses. */
543
544 static inline void
545 cleanup_build_arrays (void)
546 {
547 build_vdef = NULL_TREE;
548 build_vuse = NULL_TREE;
549 VEC_truncate (tree, build_defs, 0);
550 VEC_truncate (tree, build_uses, 0);
551 }
552
553
554 /* Finalize all the build vectors and fill the new operands into STMT. */
555
556 static inline void
557 finalize_ssa_stmt_operands (gimple stmt)
558 {
559 finalize_ssa_defs (stmt);
560 finalize_ssa_uses (stmt);
561 cleanup_build_arrays ();
562 }
563
564
565 /* Start the process of building up the statement's operand vectors. */
566
567 static inline void
568 start_ssa_stmt_operands (void)
569 {
570 gcc_assert (VEC_length (tree, build_defs) == 0);
571 gcc_assert (VEC_length (tree, build_uses) == 0);
572 gcc_assert (build_vuse == NULL_TREE);
573 gcc_assert (build_vdef == NULL_TREE);
574 }
575
576
577 /* Add DEF_P to the list of pointers to operands. */
578
579 static inline void
580 append_def (tree *def_p)
581 {
582 VEC_safe_push (tree, heap, build_defs, (tree) def_p);
583 }
584
585
586 /* Add USE_P to the list of pointers to operands. */
587
588 static inline void
589 append_use (tree *use_p)
590 {
591 VEC_safe_push (tree, heap, build_uses, (tree) use_p);
592 }
593
594
595 /* Add VAR to the set of variables that require a VDEF operator. */
596
597 static inline void
598 append_vdef (tree var)
599 {
600 if (!optimize)
601 return;
602
603 gcc_assert ((build_vdef == NULL_TREE
604 || build_vdef == var)
605 && (build_vuse == NULL_TREE
606 || build_vuse == var));
607
608 build_vdef = var;
609 build_vuse = var;
610 }
611
612
613 /* Add VAR to the set of variables that require a VUSE operator. */
614
615 static inline void
616 append_vuse (tree var)
617 {
618 if (!optimize)
619 return;
620
621 gcc_assert (build_vuse == NULL_TREE
622 || build_vuse == var);
623
624 build_vuse = var;
625 }
626
627 /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
628
629 static void
630 add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
631 {
632 /* Add virtual operands to the stmt, unless the caller has specifically
633 requested not to do that (used when adding operands inside an
634 ADDR_EXPR expression). */
635 if (flags & opf_no_vops)
636 return;
637
638 gcc_assert (!is_gimple_debug (stmt));
639
640 if (flags & opf_def)
641 append_vdef (gimple_vop (cfun));
642 else
643 append_vuse (gimple_vop (cfun));
644 }
645
646
647 /* Add *VAR_P to the appropriate operand array for statement STMT.
648 FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
649 it will be added to the statement's real operands, otherwise it is
650 added to virtual operands. */
651
652 static void
653 add_stmt_operand (tree *var_p, gimple stmt, int flags)
654 {
655 tree var, sym;
656 var_ann_t v_ann;
657
658 gcc_assert (SSA_VAR_P (*var_p));
659
660 var = *var_p;
661 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
662 v_ann = var_ann (sym);
663
664 /* Mark statements with volatile operands. */
665 if (TREE_THIS_VOLATILE (sym))
666 gimple_set_has_volatile_ops (stmt, true);
667
668 if (is_gimple_reg (sym))
669 {
670 /* The variable is a GIMPLE register. Add it to real operands. */
671 if (flags & opf_def)
672 append_def (var_p);
673 else
674 append_use (var_p);
675 }
676 else
677 add_virtual_operand (stmt, flags);
678 }
679
680 /* Mark the base address of REF as having its address taken.
681 REF may be a single variable whose address has been taken or any
682 other valid GIMPLE memory reference (structure reference, array,
683 etc). */
684
685 static void
686 mark_address_taken (tree ref)
687 {
688 tree var;
689
690 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
691 as the only thing we take the address of. If VAR is a structure,
692 taking the address of a field means that the whole structure may
693 be referenced using pointer arithmetic. See PR 21407 and the
694 ensuing mailing list discussion. */
695 var = get_base_address (ref);
696 if (var && DECL_P (var))
697 TREE_ADDRESSABLE (var) = 1;
698 }
699
700
701 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
702 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
703
704 STMT is the statement being processed, EXPR is the INDIRECT_REF
705 that got us here.
706
707 FLAGS is as in get_expr_operands.
708
709 RECURSE_ON_BASE should be set to true if we want to continue
710 calling get_expr_operands on the base pointer, and false if
711 something else will do it for us. */
712
713 static void
714 get_indirect_ref_operands (gimple stmt, tree expr, int flags,
715 bool recurse_on_base)
716 {
717 tree *pptr = &TREE_OPERAND (expr, 0);
718
719 if (TREE_THIS_VOLATILE (expr))
720 gimple_set_has_volatile_ops (stmt, true);
721
722 /* Add the VOP. */
723 add_virtual_operand (stmt, flags);
724
725 /* If requested, add a USE operand for the base pointer. */
726 if (recurse_on_base)
727 get_expr_operands (stmt, pptr,
728 opf_use | (flags & opf_no_vops));
729 }
730
731
732 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
733
734 static void
735 get_tmr_operands (gimple stmt, tree expr, int flags)
736 {
737 /* First record the real operands. */
738 get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
739 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
740
741 if (TMR_SYMBOL (expr))
742 mark_address_taken (TMR_SYMBOL (expr));
743
744 add_virtual_operand (stmt, flags);
745 }
746
747
748 /* If STMT is a call that may clobber globals and other symbols that
749 escape, add them to the VDEF/VUSE lists for it. */
750
751 static void
752 maybe_add_call_vops (gimple stmt)
753 {
754 int call_flags = gimple_call_flags (stmt);
755
756 /* Unless the call is marked ECF_NOVOPS (it does not touch memory at
757 all), add a VDEF or VUSE operand for the memory state the call
758 may clobber or read. */
759 if (!(call_flags & ECF_NOVOPS))
760 {
761 /* A 'pure' or a 'const' function never call-clobbers anything.
762 A 'noreturn' function might, but since we don't return anyway
763 there is no point in recording that. */
764 if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
765 add_virtual_operand (stmt, opf_def);
766 else if (!(call_flags & ECF_CONST))
767 add_virtual_operand (stmt, opf_use);
768 }
769 }
770
771
772 /* Scan operands in the GIMPLE_ASM statement STMT. */
773
774 static void
775 get_asm_expr_operands (gimple stmt)
776 {
777 size_t i, noutputs;
778 const char **oconstraints;
779 const char *constraint;
780 bool allows_mem, allows_reg, is_inout;
781
782 noutputs = gimple_asm_noutputs (stmt);
783 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
784
785 /* Gather all output operands. */
786 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
787 {
788 tree link = gimple_asm_output_op (stmt, i);
789 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
790 oconstraints[i] = constraint;
791 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
792 &allows_reg, &is_inout);
793
794 /* This should have been split in gimplify_asm_expr. */
795 gcc_assert (!allows_reg || !is_inout);
796
797 /* Memory operands are addressable. Note that STMT needs the
798 address of this operand. */
799 if (!allows_reg && allows_mem)
800 {
801 tree t = get_base_address (TREE_VALUE (link));
802 if (t && DECL_P (t))
803 mark_address_taken (t);
804 }
805
806 get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
807 }
808
809 /* Gather all input operands. */
810 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
811 {
812 tree link = gimple_asm_input_op (stmt, i);
813 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
814 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
815 &allows_mem, &allows_reg);
816
817 /* Memory operands are addressable. Note that STMT needs the
818 address of this operand. */
819 if (!allows_reg && allows_mem)
820 {
821 tree t = get_base_address (TREE_VALUE (link));
822 if (t && DECL_P (t))
823 mark_address_taken (t);
824 }
825
826 get_expr_operands (stmt, &TREE_VALUE (link), 0);
827 }
828
829 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
830 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
831 {
832 tree link = gimple_asm_clobber_op (stmt, i);
833 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
834 {
835 add_virtual_operand (stmt, opf_def);
836 break;
837 }
838 }
839 }
840
841
842 /* Recursively scan the expression pointed to by EXPR_P in statement
843 STMT. FLAGS is one of the OPF_* constants modifying how to
844 interpret the operands found. */
845
846 static void
847 get_expr_operands (gimple stmt, tree *expr_p, int flags)
848 {
849 enum tree_code code;
850 enum tree_code_class codeclass;
851 tree expr = *expr_p;
852 int uflags = opf_use;
853
854 if (expr == NULL)
855 return;
856
857 if (is_gimple_debug (stmt))
858 uflags |= (flags & opf_no_vops);
859
860 code = TREE_CODE (expr);
861 codeclass = TREE_CODE_CLASS (code);
862
863 switch (code)
864 {
865 case ADDR_EXPR:
866 /* Taking the address of a variable does not represent a
867 reference to it, but the fact that the statement takes its
868 address will be of interest to some passes (e.g. alias
869 resolution). */
870 if (!is_gimple_debug (stmt))
871 mark_address_taken (TREE_OPERAND (expr, 0));
872
873 /* If the address is invariant, there may be no interesting
874 variable references inside. */
875 if (is_gimple_min_invariant (expr))
876 return;
877
878 /* Otherwise, there may be variables referenced inside but there
879 should be no VUSEs created, since the referenced objects are
880 not really accessed. The only operands that we should find
881 here are ARRAY_REF indices which will always be real operands
882 (GIMPLE does not allow non-registers as array indices). */
883 flags |= opf_no_vops;
884 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
885 return;
886
887 case SSA_NAME:
888 add_stmt_operand (expr_p, stmt, flags);
889 return;
890
891 case VAR_DECL:
892 case PARM_DECL:
893 case RESULT_DECL:
894 add_stmt_operand (expr_p, stmt, flags);
895 return;
896
897 case DEBUG_EXPR_DECL:
898 gcc_assert (gimple_debug_bind_p (stmt));
899 return;
900
901 case MISALIGNED_INDIRECT_REF:
902 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
903 /* fall through */
904
905 case ALIGN_INDIRECT_REF:
906 case INDIRECT_REF:
907 get_indirect_ref_operands (stmt, expr, flags, true);
908 return;
909
910 case TARGET_MEM_REF:
911 get_tmr_operands (stmt, expr, flags);
912 return;
913
914 case ARRAY_REF:
915 case ARRAY_RANGE_REF:
916 case COMPONENT_REF:
917 case REALPART_EXPR:
918 case IMAGPART_EXPR:
919 {
920 if (TREE_THIS_VOLATILE (expr))
921 gimple_set_has_volatile_ops (stmt, true);
922
923 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
924
925 if (code == COMPONENT_REF)
926 {
927 if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
928 gimple_set_has_volatile_ops (stmt, true);
929 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
930 }
931 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
932 {
933 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
934 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
935 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
936 }
937
938 return;
939 }
940
941 case WITH_SIZE_EXPR:
942 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
943 and an rvalue reference to its second argument. */
944 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
945 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
946 return;
947
948 case COND_EXPR:
949 case VEC_COND_EXPR:
950 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
951 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
952 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
953 return;
954
955 case CONSTRUCTOR:
956 {
957 /* General aggregate CONSTRUCTORs have been decomposed, but they
958 are still in use as the COMPLEX_EXPR equivalent for vectors. */
959 constructor_elt *ce;
960 unsigned HOST_WIDE_INT idx;
961
962 for (idx = 0;
963 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
964 idx++)
965 get_expr_operands (stmt, &ce->value, uflags);
966
967 return;
968 }
969
970 case BIT_FIELD_REF:
971 if (TREE_THIS_VOLATILE (expr))
972 gimple_set_has_volatile_ops (stmt, true);
973 /* FALLTHRU */
974
975 case TRUTH_NOT_EXPR:
976 case VIEW_CONVERT_EXPR:
977 do_unary:
978 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
979 return;
980
981 case TRUTH_AND_EXPR:
982 case TRUTH_OR_EXPR:
983 case TRUTH_XOR_EXPR:
984 case COMPOUND_EXPR:
985 case OBJ_TYPE_REF:
986 case ASSERT_EXPR:
987 do_binary:
988 {
989 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
990 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
991 return;
992 }
993
994 case DOT_PROD_EXPR:
995 case REALIGN_LOAD_EXPR:
996 {
997 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
998 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
999 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1000 return;
1001 }
1002
1003 case FUNCTION_DECL:
1004 case LABEL_DECL:
1005 case CONST_DECL:
1006 case CASE_LABEL_EXPR:
1007 /* Expressions that make no memory references. */
1008 return;
1009
1010 default:
1011 if (codeclass == tcc_unary)
1012 goto do_unary;
1013 if (codeclass == tcc_binary || codeclass == tcc_comparison)
1014 goto do_binary;
1015 if (codeclass == tcc_constant || codeclass == tcc_type)
1016 return;
1017 }
1018
1019 /* If we get here, something has gone wrong. */
1020 #ifdef ENABLE_CHECKING
1021 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1022 debug_tree (expr);
1023 fputs ("\n", stderr);
1024 #endif
1025 gcc_unreachable ();
1026 }
1027
1028
1029 /* Parse STMT looking for operands. When finished, the various
1030 build_* operand vectors will have potential operands in them. */
1031
1032 static void
1033 parse_ssa_operands (gimple stmt)
1034 {
1035 enum gimple_code code = gimple_code (stmt);
1036
1037 if (code == GIMPLE_ASM)
1038 get_asm_expr_operands (stmt);
1039 else if (is_gimple_debug (stmt))
1040 {
1041 if (gimple_debug_bind_p (stmt)
1042 && gimple_debug_bind_has_value_p (stmt))
1043 get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
1044 opf_use | opf_no_vops);
1045 }
1046 else
1047 {
1048 size_t i, start = 0;
1049
1050 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1051 {
1052 get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
1053 start = 1;
1054 }
1055
1056 for (i = start; i < gimple_num_ops (stmt); i++)
1057 get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
1058
1059 /* Add call-clobbered operands, if needed. */
1060 if (code == GIMPLE_CALL)
1061 maybe_add_call_vops (stmt);
1062 }
1063 }
1064
1065
1066 /* Create an operands cache for STMT. */
1067
1068 static void
1069 build_ssa_operands (gimple stmt)
1070 {
1071 /* Initially assume that the statement has no volatile operands. */
1072 gimple_set_has_volatile_ops (stmt, false);
1073
1074 start_ssa_stmt_operands ();
1075 parse_ssa_operands (stmt);
1076 finalize_ssa_stmt_operands (stmt);
1077 }
1078
1079
1080 /* Releases the operands of STMT back to their freelists, and clears
1081 the stmt operand lists. */
1082
1083 void
1084 free_stmt_operands (gimple stmt)
1085 {
1086 def_optype_p defs = gimple_def_ops (stmt), last_def;
1087 use_optype_p uses = gimple_use_ops (stmt), last_use;
1088
1089 if (defs)
1090 {
1091 for (last_def = defs; last_def->next; last_def = last_def->next)
1092 continue;
1093 last_def->next = gimple_ssa_operands (cfun)->free_defs;
1094 gimple_ssa_operands (cfun)->free_defs = defs;
1095 gimple_set_def_ops (stmt, NULL);
1096 }
1097
1098 if (uses)
1099 {
1100 for (last_use = uses; last_use->next; last_use = last_use->next)
1101 delink_imm_use (USE_OP_PTR (last_use));
1102 delink_imm_use (USE_OP_PTR (last_use));
1103 last_use->next = gimple_ssa_operands (cfun)->free_uses;
1104 gimple_ssa_operands (cfun)->free_uses = uses;
1105 gimple_set_use_ops (stmt, NULL);
1106 }
1107
1108 if (gimple_has_mem_ops (stmt))
1109 {
1110 gimple_set_vuse (stmt, NULL_TREE);
1111 gimple_set_vdef (stmt, NULL_TREE);
1112 }
1113 }
1114
1115
1116 /* Get the operands of statement STMT. */
1117
1118 void
1119 update_stmt_operands (gimple stmt)
1120 {
1121 /* If update_stmt_operands is called before SSA is initialized, do
1122 nothing. */
1123 if (!ssa_operands_active ())
1124 return;
1125
1126 timevar_push (TV_TREE_OPS);
1127
1128 gcc_assert (gimple_modified_p (stmt));
1129 build_ssa_operands (stmt);
1130 gimple_set_modified (stmt, false);
1131
1132 timevar_pop (TV_TREE_OPS);
1133 }
1134
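/* Illustrative sketch only: passes do not usually call
   update_stmt_operands directly.  After rewriting a statement in place
   they call update_stmt (), e.g.

     gimple_assign_set_rhs1 (stmt, new_op);
     update_stmt (stmt);

   which marks the statement modified and ends up here to rebuild the
   operand cache.  (new_op is a hypothetical replacement operand.)  */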
1135
1136 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
1137 to test the validity of the swap operation. */
1138
1139 void
1140 swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
1141 {
1142 tree op0, op1;
1143 op0 = *exp0;
1144 op1 = *exp1;
1145
1146 /* If the operand cache is active, attempt to preserve the relative
1147 positions of these two operands in their respective immediate use
1148 lists. */
1149 if (ssa_operands_active () && op0 != op1)
1150 {
1151 use_optype_p use0, use1, ptr;
1152 use0 = use1 = NULL;
1153
1154 /* Find the 2 operands in the cache, if they are there. */
1155 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1156 if (USE_OP_PTR (ptr)->use == exp0)
1157 {
1158 use0 = ptr;
1159 break;
1160 }
1161
1162 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1163 if (USE_OP_PTR (ptr)->use == exp1)
1164 {
1165 use1 = ptr;
1166 break;
1167 }
1168
1169 /* Unless both uses have operand entries, there isn't much we can
1170 do at this point. Presumably we don't need to worry about it. */
1171 if (use0 && use1)
1172 {
1173 tree *tmp = USE_OP_PTR (use1)->use;
1174 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1175 USE_OP_PTR (use0)->use = tmp;
1176 }
1177 }
1178
1179 /* Now swap the data. */
1180 *exp0 = op1;
1181 *exp1 = op0;
1182 }
1183
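/* Illustrative sketch only: callers that canonicalize commutative
   operands typically invoke this as

     swap_tree_operands (stmt,
                         gimple_assign_rhs1_ptr (stmt),
                         gimple_assign_rhs2_ptr (stmt));

   so that the immediate-use links keep referring to the correct
   operand slots after the swap.  */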
1184
1185 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1186 Return TRUE if there is a problem and emit an error message to F. */
1187
1188 bool
1189 verify_imm_links (FILE *f, tree var)
1190 {
1191 use_operand_p ptr, prev, list;
1192 int count;
1193
1194 gcc_assert (TREE_CODE (var) == SSA_NAME);
1195
1196 list = &(SSA_NAME_IMM_USE_NODE (var));
1197 gcc_assert (list->use == NULL);
1198
1199 if (list->prev == NULL)
1200 {
1201 gcc_assert (list->next == NULL);
1202 return false;
1203 }
1204
1205 prev = list;
1206 count = 0;
1207 for (ptr = list->next; ptr != list; )
1208 {
1209 if (prev != ptr->prev)
1210 goto error;
1211
1212 if (ptr->use == NULL)
1213 goto error; /* 2 roots, or SAFE guard node. */
1214 else if (*(ptr->use) != var)
1215 goto error;
1216
1217 prev = ptr;
1218 ptr = ptr->next;
1219
1220 /* Avoid infinite loops. 50,000,000 uses probably indicates a
1221 problem. */
1222 if (count++ > 50000000)
1223 goto error;
1224 }
1225
1226 /* Verify list in the other direction. */
1227 prev = list;
1228 for (ptr = list->prev; ptr != list; )
1229 {
1230 if (prev != ptr->next)
1231 goto error;
1232 prev = ptr;
1233 ptr = ptr->prev;
1234 if (count-- < 0)
1235 goto error;
1236 }
1237
1238 if (count != 0)
1239 goto error;
1240
1241 return false;
1242
1243 error:
1244 if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
1245 {
1246 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
1247 print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
1248 }
1249 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1250 (void *)ptr->use);
1251 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
1252 fprintf(f, "\n");
1253 return true;
1254 }
1255
1256
1257 /* Dump all the immediate uses of VAR to FILE. */
1258
1259 void
1260 dump_immediate_uses_for (FILE *file, tree var)
1261 {
1262 imm_use_iterator iter;
1263 use_operand_p use_p;
1264
1265 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1266
1267 print_generic_expr (file, var, TDF_SLIM);
1268 fprintf (file, " : -->");
1269 if (has_zero_uses (var))
1270 fprintf (file, " no uses.\n");
1271 else
1272 if (has_single_use (var))
1273 fprintf (file, " single use.\n");
1274 else
1275 fprintf (file, "%d uses.\n", num_imm_uses (var));
1276
1277 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1278 {
1279 if (use_p->loc.stmt == NULL && use_p->use == NULL)
1280 fprintf (file, "***end of stmt iterator marker***\n");
1281 else
1282 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1283 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
1284 else
1285 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
1286 }
1287 fprintf(file, "\n");
1288 }
1289
1290
1291 /* Dump all the immediate uses to FILE. */
1292
1293 void
1294 dump_immediate_uses (FILE *file)
1295 {
1296 tree var;
1297 unsigned int x;
1298
1299 fprintf (file, "Immediate_uses: \n\n");
1300 for (x = 1; x < num_ssa_names; x++)
1301 {
1302 var = ssa_name(x);
1303 if (!var)
1304 continue;
1305 dump_immediate_uses_for (file, var);
1306 }
1307 }
1308
1309
1310 /* Dump def-use edges on stderr. */
1311
1312 void
1313 debug_immediate_uses (void)
1314 {
1315 dump_immediate_uses (stderr);
1316 }
1317
1318
1319 /* Dump def-use edges of VAR on stderr. */
1320
1321 void
1322 debug_immediate_uses_for (tree var)
1323 {
1324 dump_immediate_uses_for (stderr, var);
1325 }
1326
1327
1328 /* Unlink STMT's virtual definition from the IL by replacing its uses with STMT's VUSE. */
1329
1330 void
1331 unlink_stmt_vdef (gimple stmt)
1332 {
1333 use_operand_p use_p;
1334 imm_use_iterator iter;
1335 gimple use_stmt;
1336 tree vdef = gimple_vdef (stmt);
1337
1338 if (!vdef
1339 || TREE_CODE (vdef) != SSA_NAME)
1340 return;
1341
1342 FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
1343 {
1344 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1345 SET_USE (use_p, gimple_vuse (stmt));
1346 }
1347
1348 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
1349 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
1350 }
1351
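/* Illustrative sketch only: the usual pattern when deleting a statement
   that has a virtual definition is

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);

   where gsi is a gimple_stmt_iterator pointing at the statement; the
   propagated VUSE keeps the virtual use-def chains of later statements
   intact.  */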