/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic-core.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: defs, uses, virtual uses, and virtual may-defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
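
/* For example (an illustrative sketch, not from the original sources):
   for a statement like

     a_1 = *p_2 + b_3;

   the real operands are the def 'a_1' and the uses 'p_2' and 'b_3',
   all pointing directly into the statement, while the memory access
   '*p_2' is represented by a virtual use of the single '.MEM'
   variable, shown in dumps as '# VUSE <.MEM_4>'.  */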


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

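/* These flags combine as plain bit masks.  E.g. scanning the base
   pointer of a memory reference in get_indirect_ref_operands below uses

     get_expr_operands (stmt, pptr,
                        opf_non_addressable | opf_use
                        | (flags & (opf_no_vops|opf_not_non_addressable)));

   and helpers test a single property with 'flags & opf_def'.  */
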
/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands' FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
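
/* In GIMPLE dumps this variable shows up in the virtual operands that
   thread all memory accesses onto a single factored use-def chain,
   e.g. (illustrative):

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;

   where .MEM_2 is the memory state before the store and .MEM_3 the
   state after it.  */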

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers; that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
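
/* Sanity check on the numbers above (a sketch, assuming a 64-bit host):
   a struct use_optype_d holds a next pointer plus a ssa_use_operand_t
   of four pointers, i.e. 5 * 8 = 40 bytes, so OP_SIZE_1 yields
   (1024 - 8) / 40 = 25 use operands, matching the comment.  */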

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
                        + gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
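
/* The switch above grows each new chunk: the first request allocates
   OP_SIZE_1 (1016 bytes of operand memory on a 64-bit host), the next
   chunk OP_SIZE_2 (4088 bytes), and every chunk after that OP_SIZE_3
   (16376 bytes), so each chunk plus its 'next' pointer fits exactly in
   a 1K, 4K or 16K allocation.  */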


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}



/* Finalize the VDEF operand of STMT: install a newly built VDEF, or
   clear and unlink one that is no longer needed.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree) gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Reset the built VDEF/VUSE and empty the array of built uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, filling the new operands into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}

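/* Note that append_vdef sets both build_vdef and build_vuse: a store
   like '*p_1 = x_2' kills the previous memory state and links to it as
   well, so in dumps it carries (illustrative)

     # .MEM_3 = VDEF <.MEM_2>

   i.e. both a def and a use of the single virtual operand.  */
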
/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.
         A real def needs no operand node: it is stored directly in the
         statement, so only uses are recorded here.  */
      if (flags & opf_def)
        ;
      else
        append_use (var_p);
      if (DECL_P (*var_p))
        cfun->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
          && TREE_THIS_VOLATILE (var))
        gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
        TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
               && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
               && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
        TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr,
                     opf_non_addressable | opf_use
                     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
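
/* So for a load 'x_1 = *p_2' this records a VUSE of .MEM plus a real
   use of 'p_2'; for a store '*p_2 = x_1' the caller passes opf_def and
   the MEM_REF gets a VDEF instead, while 'p_2' remains a plain use
   (an illustrative summary, not from the original sources).  */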


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}
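
/* Summarizing the cases above: an ECF_NOVOPS call gets no virtual
   operands at all, a 'const' call gets none either, a 'pure' or
   'noreturn' call gets only a VUSE, and any other call gets a VDEF
   (which via append_vdef implies a VUSE as well).  */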


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
                         opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if ((!(flags & opf_non_addressable)
           || (flags & opf_not_non_addressable))
          && !is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                         flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (!(flags & opf_no_vops)
            && TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (!(flags & opf_no_vops)
                && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        /* A volatile constructor is actually TREE_CLOBBER_P; transfer
           the volatility to the statement.  Don't test TREE_CLOBBER_P
           here, to mirror the other uses of THIS_VOLATILE in this
           file.  */
        if (!(flags & opf_no_vops)
            && TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        for (idx = 0;
             vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
          && TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
          && gimple_debug_bind_has_value_p (stmt))
        get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
                           opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
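
/* E.g. for a GIMPLE_ASSIGN 'a_1 = b_2 + c_3' the code above scans
   operand 0 ('a_1') with opf_def and operands 1 and 2 with opf_use;
   for a GIMPLE_CALL the LHS (operand 0) is likewise the only def
   position (an illustrative summary).  */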


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}

/* Verify the SSA operands of STMT.  Return true if an error was found.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
          || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
          || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
        {
          if (use_p->use == (tree *) use)
            {
              build_uses[i] = NULL_TREE;
              break;
            }
        }
      if (i == build_uses.length ())
        {
          error ("excess use operand for stmt");
          debug_generic_expr (USE_FROM_PTR (use_p));
          return true;
        }
    }
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
        error ("use operand missing for stmt");
        debug_generic_expr (*(tree *) use);
        return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Update the operand cache of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (cfun))
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call, queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
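
/* Passes normally do not call this directly: after changing a
   statement in place they let update_stmt do the work, e.g. (an
   illustrative sketch)

     gimple_assign_set_rhs1 (stmt, new_op);
     update_stmt (stmt);

   which marks the stmt modified and ends up here.  */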


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to check the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists by adjusting their use pointer to point to the new
     operand position.  */
  if (ssa_operands_active (cfun) && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* And adjust their location to point to the new position of the
         operand.  */
      if (use0)
        USE_OP_PTR (use0)->use = exp1;
      if (use1)
        USE_OP_PTR (use1)->use = exp0;
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
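
/* A typical caller swaps the two operands of a commutative assignment
   (an illustrative sketch):

     swap_tree_operands (stmt,
                         gimple_assign_rhs1_ptr (stmt),
                         gimple_assign_rhs2_ptr (stmt));
*/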


/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error;	/* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *) ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *) ptr,
           (void *) ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Return true if OP, an SSA name or a DECL, is a virtual operand.  */

bool
virtual_operand_p (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      op = SSA_NAME_VAR (op);
      if (!op)
        return false;
    }

  if (TREE_CODE (op) == VAR_DECL)
    return VAR_DECL_IS_VIRTUAL_OPERAND (op);

  return false;
}

/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
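
/* Callers typically use this when deleting a store, e.g. (an
   illustrative sketch of the common pattern):

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);

   so that consumers of the old VDEF are rewired to the stmt's VUSE
   before the statement goes away.  */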