/* SSA operands management for trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stmt.h"
#include "print-tree.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic-core.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */


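/* For illustration only: once the cache is built, passes normally walk
   the cached operands through the iterators from ssa-iterators.h rather
   than touching the cache directly.  A minimal sketch, assuming STMT's
   operands are up to date:

     use_operand_p use_p;
     ssa_op_iter iter;

     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
       print_generic_expr (stderr, USE_FROM_PTR (use_p), TDF_SLIM);

   The cached virtual use and virtual definition, if present, are reached
   with gimple_vuse (stmt) and gimple_vdef (stmt).  */
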
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as the
   representative of the FUD chain for all virtual operands.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

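/* Illustration (not part of the original file): with virtual operands
   enabled, gimple dumps show this variable as .MEM, e.g.

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = 4;

   where every store defines a new SSA version of the single .MEM
   variable and every load uses the current one.  */
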
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))

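/* A rough sanity check of the numbers above (sketch, assuming an LP64
   host): OP_SIZE_1 leaves 1024 - 8 = 1016 usable bytes.  A use operand
   is one 'next' pointer plus a four-pointer ssa_use_operand_t, about
   5 * 8 = 40 bytes, so 1016 / 40 = 25 uses; the historical def operand
   was two pointers, 16 bytes, so 1016 / 16 = 63 defs.  */
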
/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
			+ gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Prepend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Prepend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create use operand nodes for all the new uses.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.
	 Register defs are not cached here; only uses are recorded.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	cfun->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}


/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
			 opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}


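/* Worked example (sketch): for the assignment 'a = *p' where 'a' is a
   GIMPLE register, parsing records a real USE of 'p' (the base pointer),
   a VUSE of .MEM for the load from '*p', and nothing in the cache for
   the def of 'a', which is stored in the stmt itself.  */
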
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (cfun))
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}

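/* Illustrative only: callers normally reach this through the update_stmt
   wrapper rather than calling update_stmt_operands directly.  A pass that
   edits a statement in place does, e.g. (sketch):

     gimple_assign_set_rhs1 (stmt, new_op);
     update_stmt (stmt);

   which marks the stmt modified and then rebuilds the operand cache.  */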

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  gcc_checking_assert (ssa_operands_active (cfun));

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}


/* Scan the immediate_use list for VAR making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}

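/* Illustrative only: a typical caller is a pass removing a dead store,
   which rewires the virtual use-def chain before deleting the statement,
   e.g. (sketch, with GSI pointing at STMT):

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);  */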

/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}


/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}