]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-structalias.c
[multiple changes]
[thirdparty/gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "errors.h"
36 #include "diagnostic.h"
37 #include "tree.h"
38 #include "c-common.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "c-tree.h"
43 #include "tree-gimple.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "cgraph.h"
47 #include "tree-pass.h"
48 #include "timevar.h"
49 #include "alloc-pool.h"
50 #include "splay-tree.h"
51 #include "params.h"
52 #include "tree-ssa-structalias.h"
53 #include "cgraph.h"
54 #include "alias.h"
55 #include "pointer-set.h"
56
57 /* The idea behind this analyzer is to generate set constraints from the
58 program, then solve the resulting constraints in order to generate the
59 points-to sets.
60
61 Set constraints are a way of modeling program analysis problems that
62 involve sets. They consist of an inclusion constraint language,
63 describing the variables (each variable is a set) and operations that
64 are involved on the variables, and a set of rules that derive facts
65 from these operations. To solve a system of set constraints, you derive
66 all possible facts under the rules, which gives you the correct sets
67 as a consequence.
68
69 See "Efficient Field-sensitive pointer analysis for C" by "David
70 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
71 http://citeseer.ist.psu.edu/pearce04efficient.html
72
73 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
74 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
75 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76
77 There are three types of real constraint expressions, DEREF,
78 ADDRESSOF, and SCALAR. Each constraint expression consists
79 of a constraint type, a variable, and an offset.
80
81 SCALAR is a constraint expression type used to represent x, whether
82 it appears on the LHS or the RHS of a statement.
83 DEREF is a constraint expression type used to represent *x, whether
84 it appears on the LHS or the RHS of a statement.
85 ADDRESSOF is a constraint expression used to represent &x, whether
86 it appears on the LHS or the RHS of a statement.
87
88 Each pointer variable in the program is assigned an integer id, and
89 each field of a structure variable is assigned an integer id as well.
90
91 Structure variables are linked to their list of fields through a "next
92 field" in each variable that points to the next field in offset
93 order.
94 Each variable for a structure field has
95
96 1. "size", that tells the size in bits of that field.
97 2. "fullsize", that tells the size in bits of the entire structure.
98 3. "offset", that tells the offset in bits from the beginning of the
99 structure to this field.
100
101 Thus,
102 struct f
103 {
104 int a;
105 int b;
106 } foo;
107 int *bar;
108
109 looks like
110
111 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
112 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
113 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
114
115
116 In order to solve the system of set constraints, the following is
117 done:
118
119 1. Each constraint variable x has a solution set associated with it,
120 Sol(x).
121
122 2. Constraints are separated into direct, copy, and complex.
123 Direct constraints are ADDRESSOF constraints that require no extra
124 processing, such as P = &Q
125 Copy constraints are those of the form P = Q.
126 Complex constraints are all the constraints involving dereferences
127 and offsets (including offsetted copies).
128
129 3. All direct constraints of the form P = &Q are processed, such
130 that Q is added to Sol(P)
131
132 4. All complex constraints for a given constraint variable are stored in a
133 linked list attached to that variable's node.
134
135 5. A directed graph is built out of the copy constraints. Each
136 constraint variable is a node in the graph, and an edge from
137 Q to P is added for each copy constraint of the form P = Q
138
139 6. The graph is then walked, and solution sets are
140 propagated along the copy edges, such that an edge from Q to P
141 causes Sol(P) <- Sol(P) union Sol(Q).
142
143 7. As we visit each node, all complex constraints associated with
144 that node are processed by adding appropriate copy edges to the graph, or the
145 appropriate variables to the solution set.
146
147 8. The process of walking the graph is iterated until no solution
148 sets change.
149
150 Prior to walking the graph in steps 6 and 7, We perform static
151 cycle elimination on the constraint graph, as well
152 as off-line variable substitution.
153
154 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
155 on and turned into anything), but isn't. You can just see what offset
156 inside the pointed-to struct it's going to access.
157
158 TODO: Constant bounded arrays can be handled as if they were structs of the
159 same number of elements.
160
161 TODO: Modeling heap and incoming pointers becomes much better if we
162 add fields to them as we discover them, which we could do.
163
164 TODO: We could handle unions, but to be honest, it's probably not
165 worth the pain or slowdown. */
166
167 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
168 htab_t heapvar_for_stmt;
169
170 static bool use_field_sensitive = true;
171 static int in_ipa_mode = 0;
172
173 /* Used for predecessor bitmaps. */
174 static bitmap_obstack predbitmap_obstack;
175
176 /* Used for points-to sets. */
177 static bitmap_obstack pta_obstack;
178
179 /* Used for oldsolution members of variables. */
180 static bitmap_obstack oldpta_obstack;
181
182 /* Used for per-solver-iteration bitmaps. */
183 static bitmap_obstack iteration_obstack;
184
185 static unsigned int create_variable_info_for (tree, const char *);
186 typedef struct constraint_graph *constraint_graph_t;
187 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188
189 DEF_VEC_P(constraint_t);
190 DEF_VEC_ALLOC_P(constraint_t,heap);
191
/* Like EXECUTE_IF_SET_IN_BITMAP, but do nothing when bitmap A is
   NULL.  A is the bitmap, B the starting bit, C the iteration
   variable and D the bitmap_iterator.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
  if (a)					 \
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195
/* Statistics gathered while building and solving the constraint
   system.  num_edges and num_implicit_edges are updated as edges are
   added to the graph; the remaining counters are presumably updated
   by the solver code outside this chunk -- confirm there.  */

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
207
/* Per-variable information for each constraint variable.  Structure
   fields get one of these each, chained through NEXT in offset
   order.  */

struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* True if the variable is directly the target of a dereference.
     This is used to track which variables are *actually* dereferenced
     so we can prune their points-to sets.  */
  unsigned int directly_dereferenced:1;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var:1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var:1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var:1;

  /* True for variables that have unions somewhere in them.  */
  unsigned int has_union:1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var:1;

  /* True if we may not use TBAA to prune references to this
     variable.  This is used for C++ placement new.  */
  unsigned int no_tbaa_pruning : 1;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;

  /* Variable ids represented by this node.  */
  bitmap variables;

  /* Variable id this was collapsed to due to type unsafety.  This
     should be unused completely after build_succ_graph, or something
     is broken.  */
  struct variable_info *collapsed_to;
};
typedef struct variable_info *varinfo_t;
272
273 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
274
275 /* Pool of variable info structures. */
276 static alloc_pool variable_info_pool;
277
278 DEF_VEC_P(varinfo_t);
279
280 DEF_VEC_ALLOC_P(varinfo_t, heap);
281
282 /* Table of variable info structures for constraint variables.
283 Indexed directly by variable info id. */
284 static VEC(varinfo_t,heap) *varmap;
285
/* Return the varmap element N.  N must be a valid index into VARMAP;
   no bounds checking is done here.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}
293
294 /* Return the varmap element N, following the collapsed_to link. */
295
296 static inline varinfo_t
297 get_varinfo_fc (unsigned int n)
298 {
299 varinfo_t v = VEC_index (varinfo_t, varmap, n);
300
301 if (v->collapsed_to)
302 return v->collapsed_to;
303 return v;
304 }
305
306 /* Variable that represents the unknown pointer. */
307 static varinfo_t var_anything;
308 static tree anything_tree;
309 static unsigned int anything_id;
310
311 /* Variable that represents the NULL pointer. */
312 static varinfo_t var_nothing;
313 static tree nothing_tree;
314 static unsigned int nothing_id;
315
316 /* Variable that represents read only memory. */
317 static varinfo_t var_readonly;
318 static tree readonly_tree;
319 static unsigned int readonly_id;
320
321 /* Variable that represents integers. This is used for when people do things
322 like &0->a.b. */
323 static varinfo_t var_integer;
324 static tree integer_tree;
325 static unsigned int integer_id;
326
327 /* Lookup a heap var for FROM, and return it if we find one. */
328
329 static tree
330 heapvar_lookup (tree from)
331 {
332 struct tree_map *h, in;
333 in.base.from = from;
334
335 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
336 htab_hash_pointer (from));
337 if (h)
338 return h->to;
339 return NULL_TREE;
340 }
341
342 /* Insert a mapping FROM->TO in the heap var for statement
343 hashtable. */
344
345 static void
346 heapvar_insert (tree from, tree to)
347 {
348 struct tree_map *h;
349 void **loc;
350
351 h = GGC_NEW (struct tree_map);
352 h->hash = htab_hash_pointer (from);
353 h->base.from = from;
354 h->to = to;
355 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
356 *(struct tree_map **) loc = h;
357 }
358
/* Return a new variable info structure for the variable T, with id
   ID and name NAME.  All flags start false, the solution and
   oldsolution bitmaps are freshly allocated, and no_tbaa_pruning is
   derived from T's decl.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
  tree var;

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->directly_dereferenced = false;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->has_union = false;
  /* For an SSA name, TBAA properties live on the underlying decl.  */
  var = t;
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);
  ret->no_tbaa_pruning = (DECL_P (var)
			  && POINTER_TYPE_P (TREE_TYPE (var))
			  && DECL_NO_TBAA_P (var));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->next = NULL;
  ret->collapsed_to = NULL;
  return ret;
}
389
/* The three kinds of constraint expression: x (SCALAR), *x (DEREF)
   and &x (ADDRESSOF).  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  unsigned HOST_WIDE_INT offset;
};

typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
static void get_constraint_for (tree, VEC(ce_s, heap) **);
static void do_deref (VEC (ce_s, heap) **);
415
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};
427
428 /* List of constraints that we use to build the constraint graph from. */
429
430 static VEC(constraint_t,heap) *constraints;
431 static alloc_pool constraint_pool;
432
433
434 DEF_VEC_I(int);
435 DEF_VEC_ALLOC_I(int, heap);
436
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  (build_pred_graph allocates three
     graph nodes per variable: the variable itself plus its REF and
     ADDR nodes.)  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution). */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified. */
  unsigned int *rep;

  /* Equivalence class representative for a node.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Label for each node, used during variable substitution.  */
  unsigned int *label;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  VEC(constraint_t,heap) **complex;
};
480
481 static constraint_graph_t graph;
482
483 /* During variable substitution and the offline version of indirect
484 cycle finding, we create nodes to represent dereferences and
485 address taken constraints. These represent where these start and
486 end. */
487 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
488 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
489 #define FIRST_ADDR_NODE (LAST_REF_NODE + 1)
490
491 /* Return the representative node for NODE, if NODE has been unioned
492 with another NODE.
493 This function performs path compression along the way to finding
494 the representative. */
495
496 static unsigned int
497 find (unsigned int node)
498 {
499 gcc_assert (node < graph->size);
500 if (graph->rep[node] != node)
501 return graph->rep[node] = find (graph->rep[node]);
502 return node;
503 }
504
505 /* Union the TO and FROM nodes to the TO nodes.
506 Note that at some point in the future, we may want to do
507 union-by-rank, in which case we are going to have to return the
508 node we unified to. */
509
510 static bool
511 unite (unsigned int to, unsigned int from)
512 {
513 gcc_assert (to < graph->size && from < graph->size);
514 if (to != from && graph->rep[from] != to)
515 {
516 graph->rep[from] = to;
517 return true;
518 }
519 return false;
520 }
521
/* Create a new constraint consisting of LHS and RHS expressions.
   The constraint is pool-allocated; the pool owns the memory.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}
533
534 /* Print out constraint C to FILE. */
535
536 void
537 dump_constraint (FILE *file, constraint_t c)
538 {
539 if (c->lhs.type == ADDRESSOF)
540 fprintf (file, "&");
541 else if (c->lhs.type == DEREF)
542 fprintf (file, "*");
543 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
544 if (c->lhs.offset != 0)
545 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
546 fprintf (file, " = ");
547 if (c->rhs.type == ADDRESSOF)
548 fprintf (file, "&");
549 else if (c->rhs.type == DEREF)
550 fprintf (file, "*");
551 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
552 if (c->rhs.offset != 0)
553 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
554 fprintf (file, "\n");
555 }
556
/* Print out constraint C to stderr.  Intended for use from the
   debugger.  */

void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}
564
565 /* Print out all constraints to FILE */
566
567 void
568 dump_constraints (FILE *file)
569 {
570 int i;
571 constraint_t c;
572 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
573 dump_constraint (file, c);
574 }
575
/* Print out all constraints to stderr.  Intended for use from the
   debugger.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
583
584 /* SOLVER FUNCTIONS
585
586 The solver is a simple worklist solver, that works on the following
587 algorithm:
588
589 sbitmap changed_nodes = all zeroes;
590 changed_count = 0;
591 For each node that is not already collapsed:
592 changed_count++;
593 set bit in changed nodes
594
595 while (changed_count > 0)
596 {
597 compute topological ordering for constraint graph
598
599 find and collapse cycles in the constraint graph (updating
600 changed if necessary)
601
602 for each node (n) in the graph in topological order:
603 changed_count--;
604
605 Process each complex constraint associated with the node,
606 updating changed if necessary.
607
608 For each outgoing edge from n, propagate the solution from n to
609 the destination of the edge, updating changed as necessary.
610
611 } */
612
613 /* Return true if two constraint expressions A and B are equal. */
614
615 static bool
616 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
617 {
618 return a.type == b.type && a.var == b.var && a.offset == b.offset;
619 }
620
621 /* Return true if constraint expression A is less than constraint expression
622 B. This is just arbitrary, but consistent, in order to give them an
623 ordering. */
624
625 static bool
626 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
627 {
628 if (a.type == b.type)
629 {
630 if (a.var == b.var)
631 return a.offset < b.offset;
632 else
633 return a.var < b.var;
634 }
635 else
636 return a.type < b.type;
637 }
638
639 /* Return true if constraint A is less than constraint B. This is just
640 arbitrary, but consistent, in order to give them an ordering. */
641
642 static bool
643 constraint_less (const constraint_t a, const constraint_t b)
644 {
645 if (constraint_expr_less (a->lhs, b->lhs))
646 return true;
647 else if (constraint_expr_less (b->lhs, a->lhs))
648 return false;
649 else
650 return constraint_expr_less (a->rhs, b->rhs);
651 }
652
653 /* Return true if two constraints A and B are equal. */
654
655 static bool
656 constraint_equal (struct constraint a, struct constraint b)
657 {
658 return constraint_expr_equal (a.lhs, b.lhs)
659 && constraint_expr_equal (a.rhs, b.rhs);
660 }
661
662
663 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
664
665 static constraint_t
666 constraint_vec_find (VEC(constraint_t,heap) *vec,
667 struct constraint lookfor)
668 {
669 unsigned int place;
670 constraint_t found;
671
672 if (vec == NULL)
673 return NULL;
674
675 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
676 if (place >= VEC_length (constraint_t, vec))
677 return NULL;
678 found = VEC_index (constraint_t, vec, place);
679 if (!constraint_equal (*found, lookfor))
680 return NULL;
681 return found;
682 }
683
684 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
685
686 static void
687 constraint_set_union (VEC(constraint_t,heap) **to,
688 VEC(constraint_t,heap) **from)
689 {
690 int i;
691 constraint_t c;
692
693 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
694 {
695 if (constraint_vec_find (*to, *c) == NULL)
696 {
697 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
698 constraint_less);
699 VEC_safe_insert (constraint_t, heap, *to, place, c);
700 }
701 }
702 }
703
/* Take a solution set SET, add OFFSET to each member of the set, and
   overwrite SET with the result when done.  Members that would be
   offset past the end of their base variable, and that are not
   artificial/union/unknown-size, are dropped from the set.  */

static void
solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
{
  bitmap result = BITMAP_ALLOC (&iteration_obstack);
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
    {
      /* If this is a properly sized variable, only add offset if it's
	 less than end.  Otherwise, it is globbed to a single
	 variable.  */

      if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
	{
	  /* Map the offsetted position back to the field variable
	     that starts at or before it; skip the member if no such
	     field exists.  */
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
	  varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
	  if (!v)
	    continue;
	  bitmap_set_bit (result, v->id);
	}
      else if (get_varinfo (i)->is_artificial_var
	       || get_varinfo (i)->has_union
	       || get_varinfo (i)->is_unknown_size_var)
	{
	  /* These variables are not field-sensitive; keep them
	     unchanged rather than offsetting.  */
	  bitmap_set_bit (result, i);
	}
    }

  bitmap_copy (set, result);
  BITMAP_FREE (result);
}
739
740 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
741 process. */
742
743 static bool
744 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
745 {
746 if (inc == 0)
747 return bitmap_ior_into (to, from);
748 else
749 {
750 bitmap tmp;
751 bool res;
752
753 tmp = BITMAP_ALLOC (&iteration_obstack);
754 bitmap_copy (tmp, from);
755 solution_set_add (tmp, inc);
756 res = bitmap_ior_into (to, tmp);
757 BITMAP_FREE (tmp);
758 return res;
759 }
760 }
761
762 /* Insert constraint C into the list of complex constraints for graph
763 node VAR. */
764
765 static void
766 insert_into_complex (constraint_graph_t graph,
767 unsigned int var, constraint_t c)
768 {
769 VEC (constraint_t, heap) *complex = graph->complex[var];
770 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
771 constraint_less);
772
773 /* Only insert constraints that do not already exist. */
774 if (place >= VEC_length (constraint_t, complex)
775 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
776 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
777 }
778
779
780 /* Condense two variable nodes into a single variable node, by moving
781 all associated info from SRC to TO. */
782
783 static void
784 merge_node_constraints (constraint_graph_t graph, unsigned int to,
785 unsigned int from)
786 {
787 unsigned int i;
788 constraint_t c;
789
790 gcc_assert (find (from) == to);
791
792 /* Move all complex constraints from src node into to node */
793 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
794 {
795 /* In complex constraints for node src, we may have either
796 a = *src, and *src = a, or an offseted constraint which are
797 always added to the rhs node's constraints. */
798
799 if (c->rhs.type == DEREF)
800 c->rhs.var = to;
801 else if (c->lhs.type == DEREF)
802 c->lhs.var = to;
803 else
804 c->rhs.var = to;
805 }
806 constraint_set_union (&graph->complex[to], &graph->complex[from]);
807 VEC_free (constraint_t, heap, graph->complex[from]);
808 graph->complex[from] = NULL;
809 }
810
811
812 /* Remove edges involving NODE from GRAPH. */
813
814 static void
815 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
816 {
817 if (graph->succs[node])
818 BITMAP_FREE (graph->succs[node]);
819 }
820
/* Merge GRAPH nodes FROM and TO into node TO: combine indirect-cycle
   information and successor edges, then drop FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	{
	  graph->indirect_cycles[to] = graph->indirect_cycles[from];
	}
      else
	{
	  unsigned int tonode = find (graph->indirect_cycles[to]);
	  unsigned int fromnode = find (graph->indirect_cycles[from]);

	  if (unite (tonode, fromnode))
	    unify_nodes (graph, tonode, fromnode, true);
	}
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}
860
861
862 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
863 it doesn't exist in the graph already. */
864
865 static void
866 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
867 unsigned int from)
868 {
869 if (to == from)
870 return;
871
872 if (!graph->implicit_preds[to])
873 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
874
875 if (!bitmap_bit_p (graph->implicit_preds[to], from))
876 {
877 stats.num_implicit_edges++;
878 bitmap_set_bit (graph->implicit_preds[to], from);
879 }
880 }
881
/* Add a predecessor graph edge to GRAPH, going from TO to FROM, if
   it doesn't exist in the graph already.  (Unlike add_graph_edge,
   this returns nothing; callers do not need to know whether the edge
   was new.)  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  if (!bitmap_bit_p (graph->preds[to], from))
    bitmap_set_bit (graph->preds[to], from);
}
895
896 /* Add a graph edge to GRAPH, going from FROM to TO if
897 it doesn't exist in the graph already.
898 Return false if the edge already existed, true otherwise. */
899
900 static bool
901 add_graph_edge (constraint_graph_t graph, unsigned int to,
902 unsigned int from)
903 {
904 if (to == from)
905 {
906 return false;
907 }
908 else
909 {
910 bool r = false;
911
912 if (!graph->succs[from])
913 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
914 if (!bitmap_bit_p (graph->succs[from], to))
915 {
916 r = true;
917 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
918 stats.num_edges++;
919 bitmap_set_bit (graph->succs[from], to);
920 }
921 return r;
922 }
923 }
924
925
/* Return true if DEST -> SRC is an existing successor edge in
   GRAPH.  */

static bool
valid_graph_edge (constraint_graph_t graph, unsigned int src,
		  unsigned int dest)
{
  return (graph->succs[dest]
	  && bitmap_bit_p (graph->succs[dest], src));
}
935
/* Build the constraint graph, adding only predecessor edges right now.
   Allocates the global GRAPH with three nodes per constraint variable
   (the variable, its REF node and its ADDR node), marks non-special
   variables as direct nodes, and walks the constraint list adding
   predecessor and implicit edges.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph = XNEW (struct constraint_graph);
  /* Room for each variable plus its REF and ADDR nodes.  */
  graph->size = (VEC_length (varinfo_t, varmap)) * 3;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, VEC_length (varinfo_t, varmap));
  graph->label = XCNEWVEC (unsigned int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->complex = XCNEWVEC (VEC(constraint_t, heap) *,
			     VEC_length (varinfo_t, varmap));
  graph->direct_nodes = sbitmap_alloc (graph->size);
  sbitmap_zero (graph->direct_nodes);

  /* All non-special variables start out direct.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	SET_BIT (graph->direct_nodes, j);
    }

  /* Every node is initially its own representative and in no
     equivalence class.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->eq_rep[j] = -1;
    }

  for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
    graph->indirect_cycles[j] = -1;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
      unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	  if (rhs.type == ADDRESSOF)
	    RESET_BIT (graph->direct_nodes, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    RESET_BIT (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  add_pred_graph_edge (graph, lhsvar, FIRST_ADDR_NODE + rhsvar);
	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* An address-taken variable cannot be a direct node.  */
	  RESET_BIT (graph->direct_nodes, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side indirect.  */
	  if (rhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, lhs.var);
	  if (lhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, rhs.var);
	}
    }
}
1024
/* Build the constraint graph, adding successor edges.  Runs after
   variable unification, so each constraint side is resolved through
   find () to its representative first.  */

static void
build_succ_graph (void)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* NOTE(review): some entries may be NULL here, presumably
	 cleared by earlier processing outside this chunk -- confirm.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (get_varinfo_fc (lhs.var)->id);
      rhsvar = find (get_varinfo_fc (rhs.var)->id);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: no edge needed, Y goes straight into X's
	     solution set.  */
	  gcc_assert (find (get_varinfo_fc (rhs.var)->id)
		      == get_varinfo_fc (rhs.var)->id);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }
}
1072
1073
/* Changed variables on the last iteration.  */
static unsigned int changed_count;	/* Number of bits set in CHANGED.  */
static sbitmap changed;			/* Per-node "solution changed" flag.  */

/* Instantiate heap-allocated vectors of unsigned ints, used for the
   SCC stack and the topological order.  */
DEF_VEC_I(unsigned);
DEF_VEC_ALLOC_I(unsigned,heap);
1080
1081
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already visited by the depth-first search.  */
  sbitmap visited;
  /* Roots of components that have already been collapsed.  */
  sbitmap roots;
  /* Per-node DFS discovery number, updated to act as the Tarjan
     "lowlink" value during visitation.  */
  unsigned int *dfs;
  /* Mapping from node to its current representative (used by
     label_visit; scc_visit uses the global find () instead).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Work stack of candidate SCC members (non-root nodes only, per
     Nuutila's variant).  */
  VEC(unsigned,heap) *scc_stack;
};
1093
1094
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE are not graph nodes we traverse;
	 the bitmap is sorted, so we can stop scanning.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      /* Members of already-collapsed components are done.  */
      if (TEST_BIT (si->roots, w))
	continue;

      /* NOTE: there is deliberately no `else' here -- the lowlink
	 update in the following block must run whether or not W was
	 freshly visited by the recursive call.  */
      if (!TEST_BIT (si->visited, w))
	scc_visit (graph, si, w);
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	/* Propagate the smaller DFS number upward (lowlink).  */
	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is the root of an SCC; everything still on the stack with a
	 DFS number >= ours belongs to the same component.  */
      if (VEC_length (unsigned, si->scc_stack) > 0
	  && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  /* NOTE(review): have_ref_node is computed but never read in
	     this function -- looks like leftover state; confirm before
	     removing.  */
	  bool have_ref_node = n >= FIRST_REF_NODE;
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (VEC_length (unsigned, si->scc_stack) != 0
		 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	    {
	      unsigned int w = VEC_pop (unsigned, si->scc_stack);

	      bitmap_set_bit (scc, w);
	      if (w >= FIRST_REF_NODE)
		have_ref_node = true;
	    }

	  /* Collapse the component onto its lowest-numbered member,
	     which the assertion requires to be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  /* Mark this node for collapsing.  */
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes are not unified eagerly; record the
		     indirect cycle so the solver can collapse it
		     later (see eliminate_indirect_cycles).  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      SET_BIT (si->roots, n);
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
1186
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  The caller must already
   have made TO the union-find representative (find (to) == to); this
   merges FROM's graph edges, constraints and solution into TO and
   releases FROM's bitmaps.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* TBAA-pruning is disabled for the merged node if it was disabled
     for either input.  */
  if (get_varinfo (from)->no_tbaa_pruning)
    get_varinfo (to)->no_tbaa_pruning = true;

  /* Transfer FROM's pending "changed" status to TO, keeping
     changed_count consistent with the number of set bits.  */
  if (update_changed && TEST_BIT (changed, from))
    {
      RESET_BIT (changed, from);
      if (!TEST_BIT (changed, to))
	SET_BIT (changed, to);
      else
	{
	  gcc_assert (changed_count > 0);
	  changed_count--;
	}
    }

  /* If the solution changes because of the merging, we need to mark
     the variable as changed.  */
  if (bitmap_ior_into (get_varinfo (to)->solution,
		       get_varinfo (from)->solution))
    {
      if (update_changed && !TEST_BIT (changed, to))
	{
	  SET_BIT (changed, to);
	  changed_count++;
	}
    }

  /* FROM's sets are dead now that TO owns the union.  */
  BITMAP_FREE (get_varinfo (from)->solution);
  BITMAP_FREE (get_varinfo (from)->oldsolution);

  /* Reset TO's oldsolution so the solver reprocesses the full merged
     set on the next iteration.  */
  if (stats.iterations > 0)
    {
      BITMAP_FREE (get_varinfo (to)->oldsolution);
      get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
    }

  /* A self-edge on TO (possibly created by the merge) carries no
     information; drop it.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
1251
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse* (post-order of the DFS; consumers pop from the end).  */
  VEC(unsigned,heap) *topo_order;
};
1262
1263
1264 /* Initialize and return a topological info structure. */
1265
1266 static struct topo_info *
1267 init_topo_info (void)
1268 {
1269 size_t size = VEC_length (varinfo_t, varmap);
1270 struct topo_info *ti = XNEW (struct topo_info);
1271 ti->visited = sbitmap_alloc (size);
1272 sbitmap_zero (ti->visited);
1273 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1274 return ti;
1275 }
1276
1277
1278 /* Free the topological sort info pointed to by TI. */
1279
1280 static void
1281 free_topo_info (struct topo_info *ti)
1282 {
1283 sbitmap_free (ti->visited);
1284 VEC_free (unsigned, heap, ti->topo_order);
1285 free (ti);
1286 }
1287
1288 /* Visit the graph in topological order, and store the order in the
1289 topo_info structure. */
1290
1291 static void
1292 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1293 unsigned int n)
1294 {
1295 bitmap_iterator bi;
1296 unsigned int j;
1297
1298 SET_BIT (ti->visited, n);
1299
1300 if (graph->succs[n])
1301 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1302 {
1303 if (!TEST_BIT (ti->visited, j))
1304 topo_visit (graph, ti, j);
1305 }
1306
1307 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1308 }
1309
1310 /* Return true if variable N + OFFSET is a legal field of N. */
1311
1312 static bool
1313 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1314 {
1315 varinfo_t ninfo = get_varinfo (n);
1316
1317 /* For things we've globbed to single variables, any offset into the
1318 variable acts like the entire variable, so that it becomes offset
1319 0. */
1320 if (ninfo->is_special_var
1321 || ninfo->is_artificial_var
1322 || ninfo->is_unknown_size_var)
1323 {
1324 *offset = 0;
1325 return true;
1326 }
1327 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1328 }
1329
/* Process a constraint C that represents *x = &y.  DELTA is the
   current points-to set of x; for every pointed-to variable j we add
   y directly to Sol(j) and mark j's representative changed.  */

static void
do_da_constraint (constraint_graph_t graph ATTRIBUTE_UNUSED,
		  constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  unsigned int j;
  bitmap_iterator bi;

  /* For each member j of Delta (Sol(x)), add x to Sol(j)  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT offset = c->lhs.offset;
      if (type_safe (j, &offset) && !(get_varinfo (j)->is_special_var))
	{
	  /* *x != NULL && *x != ANYTHING*/
	  varinfo_t v;
	  unsigned int t;
	  bitmap sol;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + offset;

	  /* Resolve j + offset to the containing field variable; a
	     miss means the offset names no field we track.  */
	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);
	  sol = get_varinfo (t)->solution;
	  if (!bitmap_bit_p (sol, rhs))
	    {
	      bitmap_set_bit (sol, rhs);
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      /* The `0 &&' disables this diagnostic; kept for debugging.  */
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_da_constraint.\n");

    }
}
1372
/* Process a constraint C that represents x = *y, using DELTA as the
   starting solution (the current points-to set of y).  New copy edges
   from each pointed-to variable to x are added to GRAPH, and their
   solutions are unioned into Sol(x).  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = find (c->lhs.var);
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y may point to ANYTHING, x may too, and no per-member work is
     needed.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag = !bitmap_bit_p (sol, anything_id);
      if (flag)
	bitmap_set_bit (sol, anything_id);
      goto done;
    }
  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT roffset = c->rhs.offset;
      if (type_safe (j, &roffset))
	{
	  varinfo_t v;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
	  unsigned int t;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t) ->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* add_graph_edge returns true only for a new edge; only
	     then can the union contribute anything new.  */
	  else if (add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	}
      /* The `0 &&' disables this diagnostic; kept for debugging.  */
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");

    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}
1433
/* Process a constraint C that represents *x = y.  DELTA is the
   current points-to set of x; Sol(y) (shifted by the RHS offset) is
   unioned into the solution of every variable x may point to.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = find (c->rhs.var);
  unsigned HOST_WIDE_INT roff = c->rhs.offset;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y points to ANYTHING, everything x may point to does too;
     just set the ANYTHING bit in each target and return.  */
  if (bitmap_bit_p (sol, anything_id))
    {
      EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
	{
	  varinfo_t jvi = get_varinfo (j);
	  unsigned int t;
	  unsigned int loff = c->lhs.offset;
	  unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
	  varinfo_t v;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
	    {
	      bitmap_set_bit (get_varinfo (t)->solution, anything_id);
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      return;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j)  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT loff = c->lhs.offset;
      if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
	  bitmap tmp;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);
	  tmp = get_varinfo (t)->solution;

	  if (set_union_with_increment (tmp, sol, roff))
	    {
	      get_varinfo (t)->solution = tmp;
	      /* If we just grew y's own solution (t == rhs), refresh
		 our cached pointer so later unions see the update.  */
	      if (t == rhs)
		sol = get_varinfo (rhs)->solution;
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      /* The `0 &&' disables this diagnostic; kept for debugging.  */
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
    }
}
1507
1508 /* Handle a non-simple (simple meaning requires no iteration),
1509 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1510
1511 static void
1512 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1513 {
1514 if (c->lhs.type == DEREF)
1515 {
1516 if (c->rhs.type == ADDRESSOF)
1517 {
1518 /* *x = &y */
1519 do_da_constraint (graph, c, delta);
1520 }
1521 else
1522 {
1523 /* *x = y */
1524 do_ds_constraint (c, delta);
1525 }
1526 }
1527 else if (c->rhs.type == DEREF)
1528 {
1529 /* x = *y */
1530 if (!(get_varinfo (c->lhs.var)->is_special_var))
1531 do_sd_constraint (graph, c, delta);
1532 }
1533 else
1534 {
1535 bitmap tmp;
1536 bitmap solution;
1537 bool flag = false;
1538 unsigned int t;
1539
1540 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1541 t = find (c->rhs.var);
1542 solution = get_varinfo (t)->solution;
1543 t = find (c->lhs.var);
1544 tmp = get_varinfo (t)->solution;
1545
1546 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1547
1548 if (flag)
1549 {
1550 get_varinfo (t)->solution = tmp;
1551 if (!TEST_BIT (changed, t))
1552 {
1553 SET_BIT (changed, t);
1554 changed_count++;
1555 }
1556 }
1557 }
1558 }
1559
1560 /* Initialize and return a new SCC info structure. */
1561
1562 static struct scc_info *
1563 init_scc_info (size_t size)
1564 {
1565 struct scc_info *si = XNEW (struct scc_info);
1566 size_t i;
1567
1568 si->current_index = 0;
1569 si->visited = sbitmap_alloc (size);
1570 sbitmap_zero (si->visited);
1571 si->roots = sbitmap_alloc (size);
1572 sbitmap_zero (si->roots);
1573 si->node_mapping = XNEWVEC (unsigned int, size);
1574 si->dfs = XCNEWVEC (unsigned int, size);
1575
1576 for (i = 0; i < size; i++)
1577 si->node_mapping[i] = i;
1578
1579 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1580 return si;
1581 }
1582
1583 /* Free an SCC info structure pointed to by SI */
1584
1585 static void
1586 free_scc_info (struct scc_info *si)
1587 {
1588 sbitmap_free (si->visited);
1589 sbitmap_free (si->roots);
1590 free (si->node_mapping);
1591 free (si->dfs);
1592 VEC_free (unsigned, heap, si->scc_stack);
1593 free (si);
1594 }
1595
1596
1597 /* Find indirect cycles in GRAPH that occur, using strongly connected
1598 components, and note them in the indirect cycles map.
1599
1600 This technique comes from Ben Hardekopf and Calvin Lin,
1601 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1602 Lines of Code", submitted to PLDI 2007. */
1603
1604 static void
1605 find_indirect_cycles (constraint_graph_t graph)
1606 {
1607 unsigned int i;
1608 unsigned int size = graph->size;
1609 struct scc_info *si = init_scc_info (size);
1610
1611 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1612 if (!TEST_BIT (si->visited, i) && find (i) == i)
1613 scc_visit (graph, si, i);
1614
1615 free_scc_info (si);
1616 }
1617
1618 /* Compute a topological ordering for GRAPH, and store the result in the
1619 topo_info structure TI. */
1620
1621 static void
1622 compute_topo_order (constraint_graph_t graph,
1623 struct topo_info *ti)
1624 {
1625 unsigned int i;
1626 unsigned int size = VEC_length (varinfo_t, varmap);
1627
1628 for (i = 0; i != size; ++i)
1629 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1630 topo_visit (graph, ti, i);
1631 }
1632
1633 /* Perform offline variable substitution.
1634
1635 This is a linear time way of identifying variables that must have
1636 equivalent points-to sets, including those caused by static cycles,
1637 and single entry subgraphs, in the constraint graph.
1638
1639 The technique is described in "Off-line variable substitution for
1640 scaling points-to analysis" by Atanas Rountev and Satish Chandra,
1641 in "ACM SIGPLAN Notices" volume 35, number 5, pages 47-56.
1642
1643 There is an optimal way to do this involving hash based value
1644 numbering, once the technique is published i will implement it
1645 here.
1646
1647 The general method of finding equivalence classes is as follows:
1648 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1649 Add fake nodes (ADDRESS nodes) and edges for a = &b constraints.
1650 Initialize all non-REF/ADDRESS nodes to be direct nodes
1651 For each SCC in the predecessor graph:
1652 for each member (x) of the SCC
1653 if x is not a direct node:
1654 set rootnode(SCC) to be not a direct node
1655 collapse node x into rootnode(SCC).
1656 if rootnode(SCC) is not a direct node:
1657 label rootnode(SCC) with a new equivalence class
1658 else:
1659 if all labeled predecessors of rootnode(SCC) have the same
1660 label:
1661 label rootnode(SCC) with this label
1662 else:
1663 label rootnode(SCC) with a new equivalence class
1664
1665 All direct nodes with the same equivalence class can be replaced
1666 with a single representative node.
1667 All unlabeled nodes (label == 0) are not pointers and all edges
1668 involving them can be eliminated.
1669 We perform these optimizations during move_complex_constraints.
1670 */
1671
/* Next free pointer-equivalence class number; new classes are handed
   out by label_visit and move_complex_constraints.  Class 0 is
   reserved to mean "not a pointer".  */
static int equivalence_class;
1673
/* Recursive routine to find strongly connected components in GRAPH,
   and label it's nodes with equivalence classes.
   This is used during variable substitution to find cycles involving
   the regular or implicit predecessors, and label them as equivalent.
   The SCC finding algorithm used is the same as that for scc_visit.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_assert (si->node_mapping[n] == n);
  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->roots, w))
	continue;

      /* As in scc_visit, the lowlink update below runs whether or not
	 W was freshly visited -- no `else' is intended.  */
      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->roots, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N roots an SCC: collapse every stacked member onto N, and
	 make N indirect if any member was.  */
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  si->node_mapping[w] = n;

	  if (!TEST_BIT (graph->direct_nodes, w))
	    RESET_BIT (graph->direct_nodes, n);
	}
      SET_BIT (si->roots, n);

      if (!TEST_BIT (graph->direct_nodes, n))
	{
	  /* Indirect nodes always get a fresh equivalence class.  */
	  graph->label[n] = equivalence_class++;
	}
      else
	{
	  /* Direct node: if all labeled predecessors share one label,
	     inherit it; with no labeled predecessors the node is not
	     a pointer (label 0); otherwise it needs its own class.  */
	  unsigned int size = 0;
	  unsigned int firstlabel = ~0;

	  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
	    {
	      unsigned int j = si->node_mapping[i];

	      /* Self-edges and unlabeled (non-pointer) preds do not
		 contribute.  */
	      if (j == n || graph->label[j] == 0)
		continue;

	      if (firstlabel == (unsigned int)~0)
		{
		  firstlabel = graph->label[j];
		  size++;
		}
	      else if (graph->label[j] != firstlabel)
		size++;
	    }

	  if (size == 0)
	    graph->label[n] = 0;
	  else if (size == 1)
	    graph->label[n] = firstlabel;
	  else
	    graph->label[n] = equivalence_class++;
	}
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
1782
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the SCC
   info (whose node_mapping the caller needs for
   move_complex_constraints); the caller frees it with
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  equivalence_class = 0;

  /* We only need to visit the non-address nodes for labeling
     purposes, as the address nodes will never have any predecessors,
     because &x never appears on the LHS of a constraint.  */
  for (i = 0; i < LAST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	bool direct_node = TEST_BIT (graph->direct_nodes, i);
	fprintf (dump_file,
		 "Equivalence class for %s node id %d:%s is %d\n",
		 direct_node ? "Direct node" : "Indirect node", i,
		 get_varinfo (i)->name,
		 graph->label[si->node_mapping[i]]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Label 0 on a direct node means no pointer ever flows into
	 it, so its edges carry no information.  */
      if (graph->label[node] == 0 && TEST_BIT (graph->direct_nodes, node))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }
  return si;
}
1832
1833 /* Free information that was only necessary for variable
1834 substitution. */
1835
1836 static void
1837 free_var_substitution_info (struct scc_info *si)
1838 {
1839 free_scc_info (si);
1840 free (graph->label);
1841 free (graph->eq_rep);
1842 sbitmap_free (graph->direct_nodes);
1843 bitmap_obstack_release (&iteration_obstack);
1844 }
1845
1846 /* Return an existing node that is equivalent to NODE, which has
1847 equivalence class LABEL, if one exists. Return NODE otherwise. */
1848
1849 static unsigned int
1850 find_equivalent_node (constraint_graph_t graph,
1851 unsigned int node, unsigned int label)
1852 {
1853 /* If the address version of this variable is unused, we can
1854 substitute it for anything else with the same label.
1855 Otherwise, we know the pointers are equivalent, but not the
1856 locations. */
1857
1858 if (graph->label[FIRST_ADDR_NODE + node] == 0)
1859 {
1860 gcc_assert (label < graph->size);
1861
1862 if (graph->eq_rep[label] != -1)
1863 {
1864 /* Unify the two variables since we know they are equivalent. */
1865 if (unite (graph->eq_rep[label], node))
1866 unify_nodes (graph, graph->eq_rep[label], node, false);
1867 return graph->eq_rep[label];
1868 }
1869 else
1870 {
1871 graph->eq_rep[label] = node;
1872 }
1873 }
1874 return node;
1875 }
1876
/* Move complex constraints to the appropriate nodes, and collapse
   variables we've discovered are equivalent during variable
   substitution.  SI is the SCC_INFO that is the result of
   perform_variable_substitution.  Constraints on non-pointer
   variables are NULLed out of the constraint vector here.  */

static void
move_complex_constraints (constraint_graph_t graph,
			  struct scc_info *si)
{
  int i;
  unsigned int j;
  constraint_t c;

  /* No unifications may have happened yet.  */
  for (j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
      unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->label[lhsnode];
      rhslabel = graph->label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  /* An unlabeled indirect node still needs a class; only a
	     direct label-0 node is a true non-pointer.  */
	  if (!TEST_BIT (graph->direct_nodes, lhsnode))
	    lhslabel = graph->label[lhsnode] = equivalence_class++;
	  else
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{

		  fprintf (dump_file, "%s is a non-pointer variable,"
			   "ignoring constraint:",
			   get_varinfo (lhs.var)->name);
		  dump_constraint (dump_file, c);
		}
	      VEC_replace (constraint_t, constraints, i, NULL);
	      continue;
	    }
	}

      if (rhslabel == 0)
	{
	  if (!TEST_BIT (graph->direct_nodes, rhsnode))
	    rhslabel = graph->label[rhsnode] = equivalence_class++;
	  else
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{

		  fprintf (dump_file, "%s is a non-pointer variable,"
			   "ignoring constraint:",
			   get_varinfo (rhs.var)->name);
		  dump_constraint (dump_file, c);
		}
	      VEC_replace (constraint_t, constraints, i, NULL);
	      continue;
	    }
	}

      /* Substitute both sides by their class representatives and
	 rewrite the constraint in place.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;

      /* File complex constraints on the node the solver will process
	 them from.  */
      if (lhs.type == DEREF)
	{
	  if (rhs.type == ADDRESSOF || rhsvar > anything_id)
	    insert_into_complex (graph, lhsvar, c);
	}
      else if (rhs.type == DEREF)
	{
	  if (!(get_varinfo (lhsvar)->is_special_var))
	    insert_into_complex (graph, rhsvar, c);
	}
      else if (rhs.type != ADDRESSOF && lhsvar > anything_id
	       && (lhs.offset != 0 || rhs.offset != 0))
	{
	  insert_into_complex (graph, rhsvar, c);
	}

    }
}
1970
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  Uses the indirect_cycles map
   filled in by scc_visit to collapse everything NODE points to into
   the recorded cycle representative.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      VEC(unsigned,heap) *queue = NULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  */

      /* Pass 1: record which representatives need unifying.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		VEC_safe_push (unsigned, heap, queue, i);
	    }
	}

      /* Pass 2: do the actual merges, now that iteration is over.  */
      for (queuepos = 0;
	   VEC_iterate (unsigned, queue, queuepos, i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      VEC_free (unsigned, heap, queue);
      return true;
    }
  return false;
}
2010
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;
  bitmap pts;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      /* Only representatives with a non-empty solution and something
	 to propagate to (successors or complex constraints) need
	 processing.  */
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0))
	{
	  SET_BIT (changed, i);
	  changed_count++;
	}
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (changed_count > 0)
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in topological order so solution bits flow
	 forward in as few iterations as possible.  */
      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{

	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      VEC(constraint_t,heap) *complex = graph->complex[i];
	      bool solution_empty;

	      RESET_BIT (changed, i);
	      changed_count--;

	      /* Compute the changed set of solution bits.  Only the
		 delta since last time needs propagating.  */
	      bitmap_and_compl (pts, get_varinfo (i)->solution,
				get_varinfo (i)->oldsolution);

	      if (bitmap_empty_p (pts))
		continue;

	      bitmap_ior_into (get_varinfo (i)->oldsolution, pts);

	      solution = get_varinfo (i)->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
		{
		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      /* Re-check: a complex constraint may have populated it.  */
	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* Only the delta (PTS) needs to be unioned in.  */
		      flag = set_union_with_increment (tmp, pts, 0);

		      if (flag)
			{
			  get_varinfo (to)->solution = tmp;
			  if (!TEST_BIT (changed, to))
			    {
			      SET_BIT (changed, to);
			      changed_count++;
			    }
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  sbitmap_free (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2149
/* Map from trees to variable infos.  Keyed by tree pointer; values
   are varinfo_t.  Maintained by insert_vi_for_tree and queried by
   lookup_vi_for_tree / get_vi_for_tree.  */
static struct pointer_map_t *vi_for_tree;
2152
2153
2154 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2155
2156 static void
2157 insert_vi_for_tree (tree t, varinfo_t vi)
2158 {
2159 void **slot = pointer_map_insert (vi_for_tree, t);
2160 gcc_assert (vi);
2161 gcc_assert (*slot == NULL);
2162 *slot = vi;
2163 }
2164
2165 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2166 exist in the map, return NULL, otherwise, return the varinfo we found. */
2167
2168 static varinfo_t
2169 lookup_vi_for_tree (tree t)
2170 {
2171 void **slot = pointer_map_contains (vi_for_tree, t);
2172 if (slot == NULL)
2173 return NULL;
2174
2175 return (varinfo_t) *slot;
2176 }
2177
2178 /* Return a printable name for DECL */
2179
2180 static const char *
2181 alias_get_name (tree decl)
2182 {
2183 const char *res = get_name (decl);
2184 char *temp;
2185 int num_printed = 0;
2186
2187 if (res != NULL)
2188 return res;
2189
2190 res = "NULL";
2191 if (!dump_file)
2192 return res;
2193
2194 if (TREE_CODE (decl) == SSA_NAME)
2195 {
2196 num_printed = asprintf (&temp, "%s_%u",
2197 alias_get_name (SSA_NAME_VAR (decl)),
2198 SSA_NAME_VERSION (decl));
2199 }
2200 else if (DECL_P (decl))
2201 {
2202 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2203 }
2204 if (num_printed > 0)
2205 {
2206 res = ggc_strdup (temp);
2207 free (temp);
2208 }
2209 return res;
2210 }
2211
2212 /* Find the variable id for tree T in the map.
2213 If T doesn't exist in the map, create an entry for it and return it. */
2214
2215 static varinfo_t
2216 get_vi_for_tree (tree t)
2217 {
2218 void **slot = pointer_map_contains (vi_for_tree, t);
2219 if (slot == NULL)
2220 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2221
2222 return (varinfo_t) *slot;
2223 }
2224
2225 /* Get a constraint expression from an SSA_VAR_P node. */
2226
2227 static struct constraint_expr
2228 get_constraint_exp_from_ssa_var (tree t)
2229 {
2230 struct constraint_expr cexpr;
2231
2232 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2233
2234 /* For parameters, get at the points-to set for the actual parm
2235 decl. */
2236 if (TREE_CODE (t) == SSA_NAME
2237 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2238 && SSA_NAME_IS_DEFAULT_DEF (t))
2239 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2240
2241 cexpr.type = SCALAR;
2242
2243 cexpr.var = get_vi_for_tree (t)->id;
2244 /* If we determine the result is "anything", and we know this is readonly,
2245 say it points to readonly memory instead. */
2246 if (cexpr.var == anything_id && TREE_READONLY (t))
2247 {
2248 cexpr.type = ADDRESSOF;
2249 cexpr.var = readonly_id;
2250 }
2251
2252 cexpr.offset = 0;
2253 return cexpr;
2254 }
2255
2256 /* Process a completed constraint T, and add it to the constraint
2257 list. */
2258
static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  /* Both sides must reference variables already present in VARMAP.  */
  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));

  /* Record direct dereferences; this feeds later solver decisions.  */
  if (lhs.type == DEREF)
    get_varinfo (lhs.var)->directly_dereferenced = true;
  if (rhs.type == DEREF)
    get_varinfo (rhs.var)->directly_dereferenced = true;

  /* Without field sensitivity, offsets carry no meaning; clear them
     on the stored constraint.  */
  if (!use_field_sensitive)
    {
      t->rhs.offset = 0;
      t->lhs.offset = 0;
    }

  /* ANYTHING == ANYTHING is pointless.  */
  if (lhs.var == anything_id && rhs.var == anything_id)
    return;

  /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING.  */
  else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
    {
      /* Swap the two sides in place (using the local RHS copy as
	 scratch) and reprocess the now-normalized constraint.  */
      rhs = t->lhs;
      t->lhs = t->rhs;
      t->rhs = rhs;
      process_constraint (t);
    }
  /* This can happen in our IR with things like n->a = *p.  */
  else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp.  */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree pointedtotype = TREE_TYPE (pointertype);
      tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);

      /* If this is an aggregate of known size, we should have passed
	 this off to do_structure_copy, and it should have broken it
	 up.  */
      gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
		  || get_varinfo (rhs.var)->is_unknown_size_var);

      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Address-of constraints never carry an offset.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      VEC_safe_push (constraint_t, heap, constraints, t);
    }
}
2316
2317 /* Return true if T is a variable of a type that could contain
2318 pointers. */
2319
2320 static bool
2321 could_have_pointers (tree t)
2322 {
2323 tree type = TREE_TYPE (t);
2324
2325 if (POINTER_TYPE_P (type)
2326 || AGGREGATE_TYPE_P (type)
2327 || TREE_CODE (type) == COMPLEX_TYPE)
2328 return true;
2329
2330 return false;
2331 }
2332
2333 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2334 structure. */
2335
2336 static unsigned HOST_WIDE_INT
2337 bitpos_of_field (const tree fdecl)
2338 {
2339
2340 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2341 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2342 return -1;
2343
2344 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2345 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2346 }
2347
2348
2349 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2350 overlaps with a field at [FIELDPOS, FIELDSIZE] */
2351
2352 static bool
2353 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2354 const unsigned HOST_WIDE_INT fieldsize,
2355 const unsigned HOST_WIDE_INT accesspos,
2356 const unsigned HOST_WIDE_INT accesssize)
2357 {
2358 if (fieldpos == accesspos && fieldsize == accesssize)
2359 return true;
2360 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2361 return true;
2362 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2363 return true;
2364
2365 return false;
2366 }
2367
2368 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2369
static void
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;
  struct constraint_expr *result;
  unsigned int beforelength = VEC_length (ce_s, *results);

  /* Some people like to do cute things like take the address of
     &0->a.b.  Strip the reference down to its innermost base.  */
  forzero = t;
  while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
    forzero = TREE_OPERAND (forzero, 0);

  /* A literal-zero base means the whole reference is really an
     offsetof-style integer computation; constrain it to INTEGER.  */
  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  /* Resolve T to its base object plus the bit extent of the access.  */
  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* String constants are readonly, so there is nothing to really do
     here.  */
  if (TREE_CODE (t) == STRING_CST)
    return;

  get_constraint_for (t, results);
  result = VEC_last (ce_s, *results);
  result->offset = bitpos;

  /* The base must have contributed exactly one new constraint.  */
  gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));

  /* This can also happen due to weird offsetof type macros.  */
  if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
    result->type = SCALAR;

  if (result->type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (result->offset < get_varinfo (result->var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  varinfo_t curr;
	  for (curr = get_varinfo (result->var); curr; curr = curr->next)
	    {
	      if (offset_overlaps_with_access (curr->offset, curr->size,
					       result->offset, bitmaxsize))
		{
		  result->var = curr->id;
		  break;
		}
	    }
	  /* Assert that we found *some* field there.  The user couldn't be
	     accessing *only* padding.  */
	  /* Still the user could access one past the end of an array
	     embedded in a struct resulting in accessing *only* padding.  */
	  gcc_assert (curr || ref_contains_array_ref (orig_t));
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");

      /* The bit offset has been folded into the chosen field above.  */
      result->offset = 0;
    }
}
2458
2459
2460 /* Dereference the constraint expression CONS, and return the result.
2461 DEREF (ADDRESSOF) = SCALAR
2462 DEREF (SCALAR) = DEREF
2463 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2464 This is needed so that we can handle dereferencing DEREF constraints. */
2465
2466 static void
2467 do_deref (VEC (ce_s, heap) **constraints)
2468 {
2469 struct constraint_expr *c;
2470 unsigned int i = 0;
2471
2472 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2473 {
2474 if (c->type == SCALAR)
2475 c->type = DEREF;
2476 else if (c->type == ADDRESSOF)
2477 c->type = SCALAR;
2478 else if (c->type == DEREF)
2479 {
2480 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2481 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2482 process_constraint (new_constraint (tmplhs, *c));
2483 c->var = tmplhs.var;
2484 }
2485 else
2486 gcc_unreachable ();
2487 }
2488 }
2489
2490 /* Given a tree T, return the constraint expression for it. */
2491
static void
get_constraint_for (tree t, VEC (ce_s, heap) **results)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.  */
  if (TREE_CODE (t) == INTEGER_CST
      && !POINTER_TYPE_P (TREE_TYPE (t)))
    {
      temp.var = integer_id;
      temp.type = SCALAR;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }
  /* A zero pointer constant points to NOTHING.  */
  else if (TREE_CODE (t) == INTEGER_CST
	   && integer_zerop (t))
    {
      temp.var = nothing_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
    case tcc_vl_exp:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    {
	      struct constraint_expr *c;
	      unsigned int i;
	      tree exp = TREE_OPERAND (t, 0);
	      tree pttype = TREE_TYPE (TREE_TYPE (t));

	      get_constraint_for (exp, results);

	      /* Make sure we capture constraints to all elements
		 of an array.  */
	      if ((handled_component_p (exp)
		   && ref_contains_array_ref (exp))
		  || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)
		{
		  struct constraint_expr *origrhs;
		  varinfo_t origvar;
		  struct constraint_expr tmp;

		  if (VEC_length (ce_s, *results) == 0)
		    return;

		  /* Replace the single result with one constraint per
		     field of the addressed variable.  */
		  gcc_assert (VEC_length (ce_s, *results) == 1);
		  origrhs = VEC_last (ce_s, *results);
		  tmp = *origrhs;
		  VEC_pop (ce_s, *results);
		  origvar = get_varinfo (origrhs->var);
		  for (; origvar; origvar = origvar->next)
		    {
		      tmp.var = origvar->id;
		      VEC_safe_push (ce_s, heap, *results, &tmp);
		    }
		}
	      /* Likewise expand to all fields when taking the address
		 of an aggregate or complex object.  */
	      else if (VEC_length (ce_s, *results) == 1
		       && (AGGREGATE_TYPE_P (pttype)
			   || TREE_CODE (pttype) == COMPLEX_TYPE))
		{
		  struct constraint_expr *origrhs;
		  varinfo_t origvar;
		  struct constraint_expr tmp;

		  gcc_assert (VEC_length (ce_s, *results) == 1);
		  origrhs = VEC_last (ce_s, *results);
		  tmp = *origrhs;
		  VEC_pop (ce_s, *results);
		  origvar = get_varinfo (origrhs->var);
		  for (; origvar; origvar = origvar->next)
		    {
		      tmp.var = origvar->id;
		      VEC_safe_push (ce_s, heap, *results, &tmp);
		    }
		}

	      /* Taking an address turns DEREF into SCALAR and
		 anything else into ADDRESSOF.  */
	      for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
		{
		  if (c->type == DEREF)
		    c->type = SCALAR;
		  else
		    c->type = ADDRESSOF;
		}
	      return;
	    }
	    break;
	  case CALL_EXPR:
	    /* XXX: In interprocedural mode, if we didn't have the
	       body, we would need to do *each pointer argument =
	       &ANYTHING added.  */
	    if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
	      {
		varinfo_t vi;
		tree heapvar = heapvar_lookup (t);

		/* Create (once per call site) an artificial HEAP
		   variable to stand for the allocated storage.  */
		if (heapvar == NULL)
		  {
		    heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
		    DECL_EXTERNAL (heapvar) = 1;
		    get_var_ann (heapvar)->is_heapvar = 1;
		    if (gimple_referenced_vars (cfun))
		      add_referenced_var (heapvar);
		    heapvar_insert (t, heapvar);
		  }

		temp.var = create_variable_info_for (heapvar,
						     alias_get_name (heapvar));

		vi = get_varinfo (temp.var);
		vi->is_artificial_var = 1;
		vi->is_heap_var = 1;
		temp.type = ADDRESSOF;
		temp.offset = 0;
		VEC_safe_push (ce_s, heap, *results, &temp);
		return;
	      }
	    else
	      {
		/* A call whose result is unknown may return any
		   pointer value.  */
		temp.var = anything_id;
		temp.type = SCALAR;
		temp.offset = 0;
		VEC_safe_push (ce_s, heap, *results, &temp);
		return;
	      }
	    break;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
      /* Every case above returns, so control never falls through to
	 the tcc_reference case below despite the missing break.  */
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case INDIRECT_REF:
	    {
	      /* *p: constrain the operand, then dereference.  */
	      get_constraint_for (TREE_OPERAND (t, 0), results);
	      do_deref (results);
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results);
	    return;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_unary:
      {
	switch (TREE_CODE (t))
	  {
	  case NOP_EXPR:
	  case CONVERT_EXPR:
	  case NON_LVALUE_EXPR:
	    {
	      tree op = TREE_OPERAND (t, 0);

	      /* Cast from non-pointer to pointers are bad news for us.
		 Anything else, we see through */
	      if (!(POINTER_TYPE_P (TREE_TYPE (t))
		    && ! POINTER_TYPE_P (TREE_TYPE (op))))
		{
		  get_constraint_for (op, results);
		  return;
		}

	      /* FALLTHRU */
	    }
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case PHI_NODE:
	    {
	      /* A PHI is constrained through its result.  */
	      get_constraint_for (PHI_RESULT (t), results);
	      return;
	    }
	    break;
	  case SSA_NAME:
	    {
	      struct constraint_expr temp;
	      temp = get_constraint_exp_from_ssa_var (t);
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	    break;
	  default:
	    {
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, *results, &temp);
	      return;
	    }
	  }
      }
    case tcc_declaration:
      {
	struct constraint_expr temp;
	temp = get_constraint_exp_from_ssa_var (t);
	VEC_safe_push (ce_s, heap, *results, &temp);
	return;
      }
    default:
      {
	/* Anything we do not understand conservatively points to
	   anything.  */
	temp.type = ADDRESSOF;
	temp.var = anything_id;
	temp.offset = 0;
	VEC_safe_push (ce_s, heap, *results, &temp);
	return;
      }
    }
}
2744
2745
2746 /* Handle the structure copy case where we have a simple structure copy
2747 between LHS and RHS that is of SIZE (in bits)
2748
2749 For each field of the lhs variable (lhsfield)
2750 For each field of the rhs variable at lhsfield.offset (rhsfield)
2751 add the constraint lhsfield = rhsfield
2752
2753 If we fail due to some kind of type unsafety or other thing we
2754 can't handle, return false. We expect the caller to collapse the
2755 variable in that case. */
2756
2757 static bool
2758 do_simple_structure_copy (const struct constraint_expr lhs,
2759 const struct constraint_expr rhs,
2760 const unsigned HOST_WIDE_INT size)
2761 {
2762 varinfo_t p = get_varinfo (lhs.var);
2763 unsigned HOST_WIDE_INT pstart, last;
2764 pstart = p->offset;
2765 last = p->offset + size;
2766 for (; p && p->offset < last; p = p->next)
2767 {
2768 varinfo_t q;
2769 struct constraint_expr templhs = lhs;
2770 struct constraint_expr temprhs = rhs;
2771 unsigned HOST_WIDE_INT fieldoffset;
2772
2773 templhs.var = p->id;
2774 q = get_varinfo (temprhs.var);
2775 fieldoffset = p->offset - pstart;
2776 q = first_vi_for_offset (q, q->offset + fieldoffset);
2777 if (!q)
2778 return false;
2779 temprhs.var = q->id;
2780 process_constraint (new_constraint (templhs, temprhs));
2781 }
2782 return true;
2783 }
2784
2785
2786 /* Handle the structure copy case where we have a structure copy between a
2787 aggregate on the LHS and a dereference of a pointer on the RHS
2788 that is of SIZE (in bits)
2789
2790 For each field of the lhs variable (lhsfield)
2791 rhs.offset = lhsfield->offset
2792 add the constraint lhsfield = rhs
2793 */
2794
2795 static void
2796 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
2797 const struct constraint_expr rhs,
2798 const unsigned HOST_WIDE_INT size)
2799 {
2800 varinfo_t p = get_varinfo (lhs.var);
2801 unsigned HOST_WIDE_INT pstart,last;
2802 pstart = p->offset;
2803 last = p->offset + size;
2804
2805 for (; p && p->offset < last; p = p->next)
2806 {
2807 varinfo_t q;
2808 struct constraint_expr templhs = lhs;
2809 struct constraint_expr temprhs = rhs;
2810 unsigned HOST_WIDE_INT fieldoffset;
2811
2812
2813 if (templhs.type == SCALAR)
2814 templhs.var = p->id;
2815 else
2816 templhs.offset = p->offset;
2817
2818 q = get_varinfo (temprhs.var);
2819 fieldoffset = p->offset - pstart;
2820 temprhs.offset += fieldoffset;
2821 process_constraint (new_constraint (templhs, temprhs));
2822 }
2823 }
2824
2825 /* Handle the structure copy case where we have a structure copy
2826 between an aggregate on the RHS and a dereference of a pointer on
2827 the LHS that is of SIZE (in bits)
2828
2829 For each field of the rhs variable (rhsfield)
2830 lhs.offset = rhsfield->offset
2831 add the constraint lhs = rhsfield
2832 */
2833
2834 static void
2835 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
2836 const struct constraint_expr rhs,
2837 const unsigned HOST_WIDE_INT size)
2838 {
2839 varinfo_t p = get_varinfo (rhs.var);
2840 unsigned HOST_WIDE_INT pstart,last;
2841 pstart = p->offset;
2842 last = p->offset + size;
2843
2844 for (; p && p->offset < last; p = p->next)
2845 {
2846 varinfo_t q;
2847 struct constraint_expr templhs = lhs;
2848 struct constraint_expr temprhs = rhs;
2849 unsigned HOST_WIDE_INT fieldoffset;
2850
2851
2852 if (temprhs.type == SCALAR)
2853 temprhs.var = p->id;
2854 else
2855 temprhs.offset = p->offset;
2856
2857 q = get_varinfo (templhs.var);
2858 fieldoffset = p->offset - pstart;
2859 templhs.offset += fieldoffset;
2860 process_constraint (new_constraint (templhs, temprhs));
2861 }
2862 }
2863
2864 /* Sometimes, frontends like to give us bad type information. This
2865 function will collapse all the fields from VAR to the end of VAR,
2866 into VAR, so that we treat those fields as a single variable.
2867 We return the variable they were collapsed into. */
2868
2869 static unsigned int
2870 collapse_rest_of_var (unsigned int var)
2871 {
2872 varinfo_t currvar = get_varinfo (var);
2873 varinfo_t field;
2874
2875 for (field = currvar->next; field; field = field->next)
2876 {
2877 if (dump_file)
2878 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
2879 field->name, currvar->name);
2880
2881 gcc_assert (!field->collapsed_to);
2882 field->collapsed_to = currvar;
2883 }
2884
2885 currvar->next = NULL;
2886 currvar->size = currvar->fullsize - currvar->offset;
2887
2888 return currvar->id;
2889 }
2890
2891 /* Handle aggregate copies by expanding into copies of the respective
2892 fields of the structures. */
2893
static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr lhs, rhs, tmp;
  VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
  varinfo_t p;
  unsigned HOST_WIDE_INT lhssize;
  unsigned HOST_WIDE_INT rhssize;

  /* Each operand of an aggregate copy must yield exactly one
     constraint.  */
  get_constraint_for (lhsop, &lhsc);
  get_constraint_for (rhsop, &rhsc);
  gcc_assert (VEC_length (ce_s, lhsc) == 1);
  gcc_assert (VEC_length (ce_s, rhsc) == 1);
  lhs = *(VEC_last (ce_s, lhsc));
  rhs = *(VEC_last (ce_s, rhsc));

  VEC_free (ce_s, heap, lhsc);
  VEC_free (ce_s, heap, rhsc);

  /* If we have special var = x, swap it around.  */
  if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
    {
      tmp = lhs;
      lhs = rhs;
      rhs = tmp;
    }

  /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
     possible it's something we could handle.  However, most cases falling
     into this are dealing with transparent unions, which are slightly
     weird.  */
  if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
    {
      /* NOTE(review): rhs.type is already ADDRESSOF on entry to this
	 branch, so only the var assignment below has any effect.  */
      rhs.type = ADDRESSOF;
      rhs.var = anything_id;
    }

  /* If the RHS is a special var, or an addressof, set all the LHS fields to
     that special var.  */
  if (rhs.var <= integer_id)
    {
      for (p = get_varinfo (lhs.var); p; p = p->next)
	{
	  struct constraint_expr templhs = lhs;
	  struct constraint_expr temprhs = rhs;

	  if (templhs.type == SCALAR )
	    templhs.var = p->id;
	  else
	    templhs.offset += p->offset;
	  process_constraint (new_constraint (templhs, temprhs));
	}
    }
  else
    {
      tree rhstype = TREE_TYPE (rhsop);
      tree lhstype = TREE_TYPE (lhsop);
      tree rhstypesize;
      tree lhstypesize;

      lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
      rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);

      /* If we have a variably sized types on the rhs or lhs, and a deref
	 constraint, add the constraint, lhsconstraint = &ANYTHING.
	 This is conservatively correct because either the lhs is an unknown
	 sized var (if the constraint is SCALAR), or the lhs is a DEREF
	 constraint, and every variable it can point to must be unknown sized
	 anyway, so we don't need to worry about fields at all.  */
      if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
	  || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
	{
	  rhs.var = anything_id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  return;
	}

      /* The size only really matters insofar as we don't set more or less of
	 the variable.  If we hit an unknown size var, the size should be the
	 whole darn thing.  */
      if (get_varinfo (rhs.var)->is_unknown_size_var)
	rhssize = ~0;
      else
	rhssize = TREE_INT_CST_LOW (rhstypesize);

      if (get_varinfo (lhs.var)->is_unknown_size_var)
	lhssize = ~0;
      else
	lhssize = TREE_INT_CST_LOW (lhstypesize);


      if (rhs.type == SCALAR && lhs.type == SCALAR)
	{
	  /* Field-by-field copy; if it fails due to type-unsafety,
	     collapse both sides to single variables and copy those.  */
	  if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
	    {
	      lhs.var = collapse_rest_of_var (lhs.var);
	      rhs.var = collapse_rest_of_var (rhs.var);
	      lhs.offset = 0;
	      rhs.offset = 0;
	      lhs.type = SCALAR;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	}
      else if (lhs.type != DEREF && rhs.type == DEREF)
	do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else if (lhs.type == DEREF && rhs.type != DEREF)
	do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else
	{
	  tree pointedtotype = lhstype;
	  tree tmpvar;

	  /* *lhs = *rhs: introduce a temporary and recurse for the
	     two halves of the copy.  */
	  gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
	  tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
	  do_structure_copy (tmpvar, rhsop);
	  do_structure_copy (lhsop, tmpvar);
	}
    }
}
3016
3017
3018 /* Update related alias information kept in AI. This is used when
3019 building name tags, alias sets and deciding grouping heuristics.
3020 STMT is the statement to process. This function also updates
3021 ADDRESSABLE_VARS. */
3022
static void
update_alias_info (tree stmt, struct alias_info *ai)
{
  bitmap addr_taken;
  use_operand_p use_p;
  ssa_op_iter iter;
  bool stmt_dereferences_ptr_p;
  enum escape_type stmt_escape_type = is_escape_site (stmt);
  struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);

  stmt_dereferences_ptr_p = false;

  /* Account for call and asm sites in the memory reference stats.  */
  if (stmt_escape_type == ESCAPE_TO_CALL
      || stmt_escape_type == ESCAPE_TO_PURE_CONST)
    {
      mem_ref_stats->num_call_sites++;
      if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
	mem_ref_stats->num_pure_const_call_sites++;
    }
  else if (stmt_escape_type == ESCAPE_TO_ASM)
    mem_ref_stats->num_asm_sites++;

  /* Mark all the variables whose address are taken by the statement.  */
  addr_taken = addresses_taken (stmt);
  if (addr_taken)
    {
      bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);

      /* If STMT is an escape point, all the addresses taken by it are
	 call-clobbered.  */
      if (stmt_escape_type != NO_ESCAPE)
	{
	  bitmap_iterator bi;
	  unsigned i;

	  EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
	    {
	      tree rvar = referenced_var (i);
	      if (!unmodifiable_var_p (rvar))
		mark_call_clobbered (rvar, stmt_escape_type);
	    }
	}
    }

  /* Process each operand use.  For pointers, determine whether they
     are dereferenced by the statement, or whether their value
     escapes, etc.  */
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
    {
      tree op, var;
      var_ann_t v_ann;
      struct ptr_info_def *pi;
      unsigned num_uses, num_loads, num_stores;

      op = USE_FROM_PTR (use_p);

      /* If STMT is a PHI node, OP may be an ADDR_EXPR.  If so, add it
	 to the set of addressable variables.  */
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  bitmap addressable_vars = gimple_addressable_vars (cfun);

	  gcc_assert (TREE_CODE (stmt) == PHI_NODE);
	  gcc_assert (addressable_vars);

	  /* PHI nodes don't have annotations for pinning the set
	     of addresses taken, so we collect them here.

	     FIXME, should we allow PHI nodes to have annotations
	     so that they can be treated like regular statements?
	     Currently, they are treated as second-class
	     statements.  */
	  add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
	  continue;
	}

      /* Ignore constants (they may occur in PHI node arguments).  */
      if (TREE_CODE (op) != SSA_NAME)
	continue;

      var = SSA_NAME_VAR (op);
      /* NOTE(review): v_ann appears unused in the rest of this loop
	 body; it may be a leftover from an earlier version.  */
      v_ann = var_ann (var);

      /* The base variable of an SSA name must be a GIMPLE register, and thus
	 it cannot be aliased.  */
      gcc_assert (!may_be_aliased (var));

      /* We are only interested in pointers.  */
      if (!POINTER_TYPE_P (TREE_TYPE (op)))
	continue;

      pi = get_ptr_info (op);

      /* Add OP to AI->PROCESSED_PTRS, if it's not there already.  */
      if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
	{
	  SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
	  VEC_safe_push (tree, heap, ai->processed_ptrs, op);
	}

      /* If STMT is a PHI node, then it will not have pointer
	 dereferences and it will not be an escape point.  */
      if (TREE_CODE (stmt) == PHI_NODE)
	continue;

      /* Determine whether OP is a dereferenced pointer, and if STMT
	 is an escape point, whether OP escapes.  */
      count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);

      /* Handle a corner case involving address expressions of the
	 form '&PTR->FLD'.  The problem with these expressions is that
	 they do not represent a dereference of PTR.  However, if some
	 other transformation propagates them into an INDIRECT_REF
	 expression, we end up with '*(&PTR->FLD)' which is folded
	 into 'PTR->FLD'.

	 So, if the original code had no other dereferences of PTR,
	 the aliaser will not create memory tags for it, and when
	 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
	 memory operations will receive no VDEF/VUSE operands.

	 One solution would be to have count_uses_and_derefs consider
	 &PTR->FLD a dereference of PTR.  But that is wrong, since it
	 is not really a dereference but an offset calculation.

	 What we do here is to recognize these special ADDR_EXPR
	 nodes.  Since these expressions are never GIMPLE values (they
	 are not GIMPLE invariants), they can only appear on the RHS
	 of an assignment and their base address is always an
	 INDIRECT_REF expression.  */
      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	  && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
	  && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
	{
	  /* If the RHS if of the form &PTR->FLD and PTR == OP, then
	     this represents a potential dereference of PTR.  */
	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
	  tree base = get_base_address (TREE_OPERAND (rhs, 0));
	  if (TREE_CODE (base) == INDIRECT_REF
	      && TREE_OPERAND (base, 0) == op)
	    num_loads++;
	}

      if (num_loads + num_stores > 0)
	{
	  /* Mark OP as dereferenced.  In a subsequent pass,
	     dereferenced pointers that point to a set of
	     variables will be assigned a name tag to alias
	     all the variables OP points to.  */
	  pi->is_dereferenced = 1;

	  /* If this is a store operation, mark OP as being
	     dereferenced to store, otherwise mark it as being
	     dereferenced to load.  */
	  if (num_stores > 0)
	    pointer_set_insert (ai->dereferenced_ptrs_store, var);
	  else
	    pointer_set_insert (ai->dereferenced_ptrs_load, var);

	  /* Update the frequency estimate for all the dereferences of
	     pointer OP.  */
	  update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);

	  /* Indicate that STMT contains pointer dereferences.  */
	  stmt_dereferences_ptr_p = true;
	}

      if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
	{
	  /* If STMT is an escape point and STMT contains at
	     least one direct use of OP, then the value of OP
	     escapes and so the pointed-to variables need to
	     be marked call-clobbered.  */
	  pi->value_escapes_p = 1;
	  pi->escape_mask |= stmt_escape_type;

	  /* If the statement makes a function call, assume
	     that pointer OP will be dereferenced in a store
	     operation inside the called function.  */
	  if (get_call_expr_in (stmt)
	      || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
	    {
	      pointer_set_insert (ai->dereferenced_ptrs_store, var);
	      pi->is_dereferenced = 1;
	    }
	}
    }

  /* PHI nodes never reference memory directly; nothing further to
     account for.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  /* Mark stored variables in STMT as being written to and update the
     memory reference stats for all memory symbols referenced by STMT.  */
  if (stmt_references_memory_p (stmt))
    {
      unsigned i;
      bitmap_iterator bi;

      mem_ref_stats->num_mem_stmts++;

      /* Notice that we only update memory reference stats for symbols
	 loaded and stored by the statement if the statement does not
	 contain pointer dereferences and it is not a call/asm site.
	 This is to avoid double accounting problems when creating
	 memory partitions.  After computing points-to information,
	 pointer dereference statistics are used to update the
	 reference stats of the pointed-to variables, so here we
	 should only update direct references to symbols.

	 Indirect references are not updated here for two reasons: (1)
	 The first time we compute alias information, the sets
	 LOADED/STORED are empty for pointer dereferences, (2) After
	 partitioning, LOADED/STORED may have references to
	 partitions, not the original pointed-to variables.  So, if we
	 always counted LOADED/STORED here and during partitioning, we
	 would count many symbols more than once.

	 This does cause some imprecision when a statement has a
	 combination of direct symbol references and pointer
	 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
	 memory symbols in its argument list, but these cases do not
	 occur so frequently as to constitute a serious problem.  */
      if (STORED_SYMS (stmt))
	EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
	  {
	    tree sym = referenced_var (i);
	    pointer_set_insert (ai->written_vars, sym);
	    if (!stmt_dereferences_ptr_p
		&& stmt_escape_type != ESCAPE_TO_CALL
		&& stmt_escape_type != ESCAPE_TO_PURE_CONST
		&& stmt_escape_type != ESCAPE_TO_ASM)
	      update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
	  }

      if (!stmt_dereferences_ptr_p
	  && LOADED_SYMS (stmt)
	  && stmt_escape_type != ESCAPE_TO_CALL
	  && stmt_escape_type != ESCAPE_TO_PURE_CONST
	  && stmt_escape_type != ESCAPE_TO_ASM)
	EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
	  update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
    }
}
3266
3267
3268 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3269 Expressions of the type PTR + CST can be handled in two ways:
3270
3271 1- If the constraint for PTR is ADDRESSOF for a non-structure
3272 variable, then we can use it directly because adding or
3273 subtracting a constant may not alter the original ADDRESSOF
3274 constraint (i.e., pointer arithmetic may not legally go outside
3275 an object's boundaries).
3276
3277 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3278 then if CST is a compile-time constant that can be used as an
3279 offset, we can determine which sub-variable will be pointed-to
3280 by the expression.
3281
3282 Return true if the expression is handled. For any other kind of
3283 expression, return false so that each operand can be added as a
3284 separate constraint by the caller. */
3285
3286 static bool
3287 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3288 {
3289 tree op0, op1;
3290 struct constraint_expr *c, *c2;
3291 unsigned int i = 0;
3292 unsigned int j = 0;
3293 VEC (ce_s, heap) *temp = NULL;
3294 unsigned int rhsoffset = 0;
3295
3296 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3297 return false;
3298
3299 op0 = TREE_OPERAND (expr, 0);
3300 op1 = TREE_OPERAND (expr, 1);
3301 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3302
3303 get_constraint_for (op0, &temp);
3304
3305 if (TREE_CODE (op1) == INTEGER_CST)
3306 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3307
3308 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3309 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3310 {
3311 if (c2->type == ADDRESSOF && rhsoffset != 0)
3312 {
3313 varinfo_t temp = get_varinfo (c2->var);
3314
3315 /* An access one after the end of an array is valid,
3316 so simply punt on accesses we cannot resolve. */
3317 temp = first_vi_for_offset (temp, rhsoffset);
3318 if (temp == NULL)
3319 continue;
3320 c2->var = temp->id;
3321 c2->offset = 0;
3322 }
3323 else
3324 c2->offset = rhsoffset;
3325 process_constraint (new_constraint (*c, *c2));
3326 }
3327
3328 VEC_free (ce_s, heap, temp);
3329
3330 return true;
3331 }
3332
3333
/* Walk statement ORIGT setting up aliasing constraints according to
   the references found in it.  This function is the main part of the
   constraint builder.  It handles PHI nodes, calls (in IPA mode),
   plain assignments, and CHANGE_DYNAMIC_TYPE_EXPR, then marks the
   statement for re-scanning.  (The previous comment mentioned an
   "AI" parameter that this function does not take.)  */

static void
find_func_aliases (tree origt)
{
  tree t = origt;
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  struct constraint_expr *c;

  /* A RETURN_EXPR with an operand is processed as if it were the
     assignment it wraps.  */
  if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
    t = TREE_OPERAND (t, 0);

  /* Now build constraints expressions.  */
  if (TREE_CODE (t) == PHI_NODE)
    {
      gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));

      /* Only care about pointers and structures containing
	 pointers.  */
      if (could_have_pointers (PHI_RESULT (t)))
	{
	  int i;
	  unsigned int j;

	  /* For a phi node, assign all the arguments to
	     the result.  */
	  get_constraint_for (PHI_RESULT (t), &lhsc);
	  for (i = 0; i < PHI_NUM_ARGS (t); i++)
	    {
	      tree rhstype;
	      tree strippedrhs = PHI_ARG_DEF (t, i);

	      STRIP_NOPS (strippedrhs);
	      rhstype = TREE_TYPE (strippedrhs);
	      get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);

	      /* Emit one constraint per (lhs, rhs) pair; RHSC is
		 drained as we go so the next argument starts from an
		 empty vector.  */
	      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
		{
		  struct constraint_expr *c2;
		  while (VEC_length (ce_s, rhsc) > 0)
		    {
		      c2 = VEC_last (ce_s, rhsc);
		      process_constraint (new_constraint (*c, *c2));
		      VEC_pop (ce_s, rhsc);
		    }
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases, either a
     GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
     CALL_EXPR when we are not.  Calls to malloc-like or alloca-like
     functions are excluded.  */
  else if (in_ipa_mode
	   && ((TREE_CODE (t) == GIMPLE_MODIFY_STMT
		&& TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
		&& !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
		     & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
	       || (TREE_CODE (t) == CALL_EXPR
		   && !(call_expr_flags (t)
			& (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
    {
      tree lhsop;
      tree rhsop;
      tree arg;
      call_expr_arg_iterator iter;
      varinfo_t fi;
      int i = 1;
      tree decl;
      if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	{
	  lhsop = GIMPLE_STMT_OPERAND (t, 0);
	  rhsop = GIMPLE_STMT_OPERAND (t, 1);
	}
      else
	{
	  lhsop = NULL;
	  rhsop = t;
	}
      decl = get_callee_fndecl (rhsop);

      /* If we can directly resolve the function being called, do so.
	 Otherwise, it must be some sort of indirect expression that
	 we should still be able to handle.  */
      if (decl)
	{
	  fi = get_vi_for_tree (decl);
	}
      else
	{
	  decl = CALL_EXPR_FN (rhsop);
	  fi = get_vi_for_tree (decl);
	}

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  I counts parameter slots,
	 starting at 1 (slot 0 is the function itself).  */

      FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
	{
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;

	  get_constraint_for (arg, &rhsc);
	  /* For an indirect call we constrain through a DEREF of the
	     callee expression; for a known FUNCTION_DECL we can name
	     the parameter variable directly.  */
	  if (TREE_CODE (decl) != FUNCTION_DECL)
	    {
	      lhs.type = DEREF;
	      lhs.var = fi->id;
	      lhs.offset = i;
	    }
	  else
	    {
	      lhs.type = SCALAR;
	      lhs.var = first_vi_for_offset (fi, i)->id;
	      lhs.offset = 0;
	    }
	  while (VEC_length (ce_s, rhsc) != 0)
	    {
	      rhsp = VEC_last (ce_s, rhsc);
	      process_constraint (new_constraint (lhs, *rhsp));
	      VEC_pop (ce_s, rhsc);
	    }
	  i++;
	}

      /* If we are returning a value, assign it to the result.  At
	 this point I is one past the last parameter slot, i.e. the
	 result slot.  */
      if (lhsop)
	{
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  unsigned int j = 0;

	  get_constraint_for (lhsop, &lhsc);
	  if (TREE_CODE (decl) != FUNCTION_DECL)
	    {
	      rhs.type = DEREF;
	      rhs.var = fi->id;
	      rhs.offset = i;
	    }
	  else
	    {
	      rhs.type = SCALAR;
	      rhs.var = first_vi_for_offset (fi, i)->id;
	      rhs.offset = 0;
	    }
	  for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
	    process_constraint (new_constraint (*lhsp, rhs));
	}
    }
  /* Otherwise, just a regular assignment statement.  */
  else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
      tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
      int i;

      /* NOTE(review): the second COMPLEX_TYPE test below checks
	 TREE_TYPE (lhsop) again; presumably TREE_TYPE (rhsop) was
	 intended — confirm before relying on this condition.  */
      if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
	   || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
	  && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
	      || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
	{
	  do_structure_copy (lhsop, rhsop);
	}
      else
	{
	  /* Only care about operations with pointers, structures
	     containing pointers, dereferences, and call expressions.  */
	  if (could_have_pointers (lhsop)
	      || TREE_CODE (rhsop) == CALL_EXPR)
	    {
	      get_constraint_for (lhsop, &lhsc);
	      switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
		{
		  /* RHS that consist of unary operations,
		     exceptional types, or bare decls/constants, get
		     handled directly by get_constraint_for.  */
		  case tcc_reference:
		  case tcc_declaration:
		  case tcc_constant:
		  case tcc_exceptional:
		  case tcc_expression:
		  case tcc_vl_exp:
		  case tcc_unary:
		      {
			unsigned int j;

			get_constraint_for (rhsop, &rhsc);
			for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
			  {
			    struct constraint_expr *c2;
			    unsigned int k;

			    for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
			      process_constraint (new_constraint (*c, *c2));
			  }

		      }
		    break;

		  case tcc_binary:
		      {
			/* For pointer arithmetic of the form
			   PTR + CST, we can simply use PTR's
			   constraint because pointer arithmetic is
			   not allowed to go out of bounds.  */
			if (handle_ptr_arith (lhsc, rhsop))
			  break;
		      }
		    /* FALLTHRU  */

		  /* Otherwise, walk each operand.  Notice that we
		     can't use the operand interface because we need
		     to process expressions other than simple operands
		     (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR).  */
		  default:
		    for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
		      {
			tree op = TREE_OPERAND (rhsop, i);
			unsigned int j;

			/* RHSC must have been drained by the previous
			   operand's inner loop.  */
			gcc_assert (VEC_length (ce_s, rhsc) == 0);
			get_constraint_for (op, &rhsc);
			for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
			  {
			    struct constraint_expr *c2;
			    while (VEC_length (ce_s, rhsc) > 0)
			      {
				c2 = VEC_last (ce_s, rhsc);
				process_constraint (new_constraint (*c, *c2));
				VEC_pop (ce_s, rhsc);
			      }
			  }
		      }
		}
	    }
	}
    }
  else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
    {
      unsigned int j;

      /* Variables whose dynamic type changes may not be pruned by
	 TBAA; flag every variable the location may refer to.  */
      get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
	get_varinfo (c->var)->no_tbaa_pruning = true;
    }

  /* After promoting variables and computing aliasing we will
     need to re-scan most statements.  FIXME: Try to minimize the
     number of statements re-scanned.  It's not really necessary to
     re-scan *all* statements.  */
  mark_stmt_modified (origt);
  VEC_free (ce_s, heap, rhsc);
  VEC_free (ce_s, heap, lhsc);
}
3591
3592
3593 /* Find the first varinfo in the same variable as START that overlaps with
3594 OFFSET.
3595 Effectively, walk the chain of fields for the variable START to find the
3596 first field that overlaps with OFFSET.
3597 Return NULL if we can't find one. */
3598
3599 static varinfo_t
3600 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3601 {
3602 varinfo_t curr = start;
3603 while (curr)
3604 {
3605 /* We may not find a variable in the field list with the actual
3606 offset when when we have glommed a structure to a variable.
3607 In that case, however, offset should still be within the size
3608 of the variable. */
3609 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3610 return curr;
3611 curr = curr->next;
3612 }
3613 return NULL;
3614 }
3615
3616
3617 /* Insert the varinfo FIELD into the field list for BASE, at the front
3618 of the list. */
3619
3620 static void
3621 insert_into_field_list (varinfo_t base, varinfo_t field)
3622 {
3623 varinfo_t prev = base;
3624 varinfo_t curr = base->next;
3625
3626 field->next = curr;
3627 prev->next = field;
3628 }
3629
3630 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3631 offset. */
3632
3633 static void
3634 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3635 {
3636 varinfo_t prev = base;
3637 varinfo_t curr = base->next;
3638
3639 if (curr == NULL)
3640 {
3641 prev->next = field;
3642 field->next = NULL;
3643 }
3644 else
3645 {
3646 while (curr)
3647 {
3648 if (field->offset <= curr->offset)
3649 break;
3650 prev = curr;
3651 curr = curr->next;
3652 }
3653 field->next = prev->next;
3654 prev->next = field;
3655 }
3656 }
3657
3658 /* qsort comparison function for two fieldoff's PA and PB */
3659
3660 static int
3661 fieldoff_compare (const void *pa, const void *pb)
3662 {
3663 const fieldoff_s *foa = (const fieldoff_s *)pa;
3664 const fieldoff_s *fob = (const fieldoff_s *)pb;
3665 HOST_WIDE_INT foasize, fobsize;
3666
3667 if (foa->offset != fob->offset)
3668 return foa->offset - fob->offset;
3669
3670 foasize = TREE_INT_CST_LOW (foa->size);
3671 fobsize = TREE_INT_CST_LOW (fob->size);
3672 return foasize - fobsize;
3673 }
3674
3675 /* Sort a fieldstack according to the field offset and sizes. */
3676 void
3677 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
3678 {
3679 qsort (VEC_address (fieldoff_s, fieldstack),
3680 VEC_length (fieldoff_s, fieldstack),
3681 sizeof (fieldoff_s),
3682 fieldoff_compare);
3683 }
3684
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
   of TYPE onto fieldstack, recording their offsets along the way.
   OFFSET is used to keep track of the offset in this entire structure, rather
   than just the immediately containing structure.  Returns the number
   of fields pushed.
   HAS_UNION is set to true if we find a union type as a field of
   TYPE.  HAS_UNION may be NULL, in which case union detection is
   skipped.  */

int
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
			     HOST_WIDE_INT offset, bool *has_union)
{
  tree field;
  int count = 0;

  /* A complex value decomposes into exactly two fields: the real
     part and the imaginary part, each of the component type.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      fieldoff_s *real_part, *img_part;
      real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      real_part->type = TREE_TYPE (type);
      real_part->size = TYPE_SIZE (TREE_TYPE (type));
      real_part->offset = offset;
      real_part->decl = NULL_TREE;

      img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      img_part->type = TREE_TYPE (type);
      img_part->size = TYPE_SIZE (TREE_TYPE (type));
      img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
      img_part->decl = NULL_TREE;

      return 2;
    }

  /* For a fixed-size array with a reasonable element count, create
     one (possibly recursive) entry per element.  */
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree sz = TYPE_SIZE (type);
      tree elsz = TYPE_SIZE (TREE_TYPE (type));
      HOST_WIDE_INT nr;
      int i;

      /* Punt on arrays whose total or element size is unknown,
	 non-constant, or zero.  */
      if (! sz
	  || ! host_integerp (sz, 1)
	  || TREE_INT_CST_LOW (sz) == 0
	  || ! elsz
	  || ! host_integerp (elsz, 1)
	  || TREE_INT_CST_LOW (elsz) == 0)
	return 0;

      nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
      /* Don't blow up the fieldstack for huge arrays.  */
      if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
	return 0;

      for (i = 0; i < nr; ++i)
	{
	  bool push = false;
	  int pushed = 0;

	  if (has_union
	      && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
		  || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
	    *has_union = true;

	  if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
	    push = true;
	  else if (!(pushed = push_fields_onto_fieldstack
		     (TREE_TYPE (type), fieldstack,
		      offset + i * TREE_INT_CST_LOW (elsz), has_union)))
	    /* Empty structures may have actual size, like in C++.  So
	       see if we didn't push any subfields and the size is
	       nonzero, push the field onto the stack */
	    push = true;

	  if (push)
	    {
	      fieldoff_s *pair;

	      pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
	      pair->type = TREE_TYPE (type);
	      pair->size = elsz;
	      pair->decl = NULL_TREE;
	      pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
	      count++;
	    }
	  else
	    count += pushed;
	}

      return count;
    }

  /* Record or structure: recurse into every FIELD_DECL, tracking
     each field's bit position relative to the outermost object.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	int pushed = 0;

	if (has_union
	    && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
		|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
	  *has_union = true;

	if (!var_can_have_subvars (field))
	  push = true;
	else if (!(pushed = push_fields_onto_fieldstack
		   (TREE_TYPE (field), fieldstack,
		    offset + bitpos_of_field (field), has_union))
		 && DECL_SIZE (field)
		 && !integer_zerop (DECL_SIZE (field)))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair;

	    pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
	    pair->type = TREE_TYPE (field);
	    pair->size = DECL_SIZE (field);
	    pair->decl = field;
	    pair->offset = offset + bitpos_of_field (field);
	    count++;
	  }
	else
	  count += pushed;
      }

  return count;
}
3815
3816 /* Create a constraint from ANYTHING variable to VI. */
3817 static void
3818 make_constraint_from_anything (varinfo_t vi)
3819 {
3820 struct constraint_expr lhs, rhs;
3821
3822 lhs.var = vi->id;
3823 lhs.offset = 0;
3824 lhs.type = SCALAR;
3825
3826 rhs.var = anything_id;
3827 rhs.offset = 0;
3828 rhs.type = ADDRESSOF;
3829 process_constraint (new_constraint (lhs, rhs));
3830 }
3831
3832 /* Count the number of arguments DECL has, and set IS_VARARGS to true
3833 if it is a varargs function. */
3834
3835 static unsigned int
3836 count_num_arguments (tree decl, bool *is_varargs)
3837 {
3838 unsigned int i = 0;
3839 tree t;
3840
3841 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
3842 t;
3843 t = TREE_CHAIN (t))
3844 {
3845 if (TREE_VALUE (t) == void_type_node)
3846 break;
3847 i++;
3848 }
3849
3850 if (!t)
3851 *is_varargs = true;
3852 return i;
3853 }
3854
/* Create the function info node for DECL, using NAME, and return the
   index of the variable we've created for the function.  The node's
   field list gets one sub-variable per fixed argument plus one for
   the result, at offsets 1..N (offset 0 is the function itself).  */

static unsigned int
create_function_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;

  /* Create the variable info.  */

  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = 0;
  vi->size = 1;
  /* fullsize = number of fixed arguments + 1 slot for the function
     itself; the result slot is added later if needed.  */
  vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);

  stats.total_vars++;

  /* If it's varargs, we don't know how many arguments it has, so we
     can't do much.
  */
  if (is_varargs)
    {
      vi->fullsize = ~0;
      vi->size = ~0;
      vi->is_unknown_size_var = true;
      return index;
    }


  arg = DECL_ARGUMENTS (decl);

  /* Set up variables for each argument.  ARG may run out before
     FULLSIZE does (e.g. when only the type, not the body, is
     available); in that case the argument varinfo falls back to
     DECL itself.  */
  for (i = 1; i < vi->fullsize; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      newindex = VEC_length (varinfo_t, varmap);
      /* NOTE(review): asprintf's return value is not checked; on
	 allocation failure TEMPNAME would be undefined.  */
      asprintf (&tempname, "%s.arg%d", name, i-1);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newindex, newname);
      argvi->decl = argdecl;
      VEC_safe_push (varinfo_t, heap, varmap, argvi);
      argvi->offset = i;
      argvi->size = 1;
      argvi->fullsize = vi->fullsize;
      argvi->has_union = false;
      insert_into_field_list_sorted (vi, argvi);
      stats.total_vars ++;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = TREE_CHAIN (arg);
	}
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree resultdecl = decl;

      vi->fullsize ++;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newindex, newname);
      resultvi->decl = resultdecl;
      VEC_safe_push (varinfo_t, heap, varmap, resultvi);
      /* I is one past the last argument slot here, which is exactly
	 the result slot.  */
      resultvi->offset = i;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->has_union = false;
      insert_into_field_list_sorted (vi, resultvi);
      stats.total_vars ++;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }
  return index;
}
3961
3962
3963 /* Return true if FIELDSTACK contains fields that overlap.
3964 FIELDSTACK is assumed to be sorted by offset. */
3965
3966 static bool
3967 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
3968 {
3969 fieldoff_s *fo = NULL;
3970 unsigned int i;
3971 HOST_WIDE_INT lastoffset = -1;
3972
3973 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3974 {
3975 if (fo->offset == lastoffset)
3976 return true;
3977 lastoffset = fo->offset;
3978 }
3979 return false;
3980 }
3981
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  Returns the index of the main (first) varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree decltype = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
  bool notokay = false;
  bool hasunion;
  bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
  VEC (fieldoff_s,heap) *fieldstack = NULL;

  /* Functions get their own creation path in IPA mode.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
    return create_function_info_for (decl, name);

  hasunion = TREE_CODE (decltype) == UNION_TYPE
	     || TREE_CODE (decltype) == QUAL_UNION_TYPE;
  /* Collect the fields up front; a union anywhere inside disables
     field sensitivity for this variable.  */
  if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
    {
      push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
      if (hasunion)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  notokay = true;
	}
    }


  /* If the variable doesn't have subvars, we may end up needing to
     sort the field list and create fake variables for all the
     fields.  */
  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = hasunion;
  /* Variables of unknown or non-constant size, and unions, get the
     "unknown size" treatment (size and fullsize of ~0).  */
  if (!declsize
      || TREE_CODE (declsize) != INTEGER_CST
      || TREE_CODE (decltype) == UNION_TYPE
      || TREE_CODE (decltype) == QUAL_UNION_TYPE)
    {
      vi->is_unknown_size_var = true;
      vi->fullsize = ~0;
      vi->size = ~0;
    }
  else
    {
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
    }

  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);
  /* Globals may point to anything unless we are doing whole-program
     IPA analysis.  */
  if (is_global && (!flag_whole_program || !in_ipa_mode))
    make_constraint_from_anything (vi);

  stats.total_vars++;
  if (use_field_sensitive
      && !notokay
      && !vi->is_unknown_size_var
      && var_can_have_subvars (decl)
      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      unsigned int newindex = VEC_length (varinfo_t, varmap);
      fieldoff_s *fo = NULL;
      unsigned int i;

      /* All fields must have a constant, non-negative offset and a
	 constant size for field sensitivity to be usable.  */
      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (! fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	}

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}


      if (VEC_length (fieldoff_s, fieldstack) != 0)
	fo = VEC_index (fieldoff_s, fieldstack, 0);

      /* Bail to a single unknown-size varinfo when the fields cannot
	 be used.  */
      if (fo == NULL || notokay)
	{
	  vi->is_unknown_size_var = 1;
	  vi->fullsize = ~0;
	  vi->size = ~0;
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return index;
	}

      /* The main varinfo becomes the first field; the remaining
	 fields (walked back-to-front) become fresh varinfos chained
	 onto it.  */
      vi->size = TREE_INT_CST_LOW (fo->size);
      vi->offset = fo->offset;
      for (i = VEC_length (fieldoff_s, fieldstack) - 1;
	   i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
	   i--)
	{
	  varinfo_t newvi;
	  const char *newname = "NULL";
	  char *tempname;

	  newindex = VEC_length (varinfo_t, varmap);
	  /* Only build the pretty per-field name when dumping.  */
	  if (dump_file)
	    {
	      if (fo->decl)
		asprintf (&tempname, "%s.%s",
			  vi->name, alias_get_name (fo->decl));
	      else
		asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
			  vi->name, fo->offset);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	  newvi = new_var_info (decl, newindex, newname);
	  newvi->offset = fo->offset;
	  newvi->size = TREE_INT_CST_LOW (fo->size);
	  newvi->fullsize = vi->fullsize;
	  insert_into_field_list (vi, newvi);
	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
	  if (is_global && (!flag_whole_program || !in_ipa_mode))
	    make_constraint_from_anything (newvi);

	  stats.total_vars++;
	}
      VEC_free (fieldoff_s, heap, fieldstack);
    }
  return index;
}
4127
4128 /* Print out the points-to solution for VAR to FILE. */
4129
4130 void
4131 dump_solution_for_var (FILE *file, unsigned int var)
4132 {
4133 varinfo_t vi = get_varinfo (var);
4134 unsigned int i;
4135 bitmap_iterator bi;
4136
4137 if (find (var) != var)
4138 {
4139 varinfo_t vipt = get_varinfo (find (var));
4140 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4141 }
4142 else
4143 {
4144 fprintf (file, "%s = { ", vi->name);
4145 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4146 {
4147 fprintf (file, "%s ", get_varinfo (i)->name);
4148 }
4149 fprintf (file, "}");
4150 if (vi->no_tbaa_pruning)
4151 fprintf (file, " no-tbaa-pruning");
4152 fprintf (file, "\n");
4153 }
4154 }
4155
/* Print the points-to solution for VAR to stdout.  */

void
debug_solution_for_var (unsigned int var)
{
  FILE *out = stdout;

  dump_solution_for_var (out, var);
}
4163
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  Incoming pointer arguments are
   either constrained to point to anything, or — under
   -fargument-noalias — to a dedicated artificial heap variable.  */

static void
intra_create_variable_infos (void)
{
  tree t;
  struct constraint_expr lhs, rhs;

  /* For each incoming pointer argument arg, create the constraint ARG
     = ANYTHING or a dummy variable if flag_argument_noalias is set.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
    {
      varinfo_t p;

      if (!could_have_pointers (t))
	continue;

      /* If flag_argument_noalias is set, then function pointer
	 arguments are guaranteed not to point to each other.  In that
	 case, create an artificial variable PARM_NOALIAS and the
	 constraint ARG = &PARM_NOALIAS.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
	{
	  varinfo_t vi;
	  tree heapvar = heapvar_lookup (t);

	  lhs.offset = 0;
	  lhs.type = SCALAR;
	  lhs.var = get_vi_for_tree (t)->id;

	  /* Create and register the PARM_NOALIAS variable on first
	     use; its alias strength depends on the flag's level.  */
	  if (heapvar == NULL_TREE)
	    {
	      var_ann_t ann;
	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
					    "PARM_NOALIAS");
	      DECL_EXTERNAL (heapvar) = 1;
	      if (gimple_referenced_vars (cfun))
		add_referenced_var (heapvar);

	      heapvar_insert (t, heapvar);

	      ann = get_var_ann (heapvar);
	      if (flag_argument_noalias == 1)
		ann->noalias_state = NO_ALIAS;
	      else if (flag_argument_noalias == 2)
		ann->noalias_state = NO_ALIAS_GLOBAL;
	      else if (flag_argument_noalias == 3)
		ann->noalias_state = NO_ALIAS_ANYTHING;
	      else
		gcc_unreachable ();
	    }

	  vi = get_vi_for_tree (heapvar);
	  vi->is_artificial_var = 1;
	  vi->is_heap_var = 1;
	  rhs.var = vi->id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  /* Emit the constraint for the argument and every field
	     varinfo chained after it.  */
	  for (p = get_varinfo (lhs.var); p; p = p->next)
	    {
	      struct constraint_expr temp = lhs;
	      temp.var = p->id;
	      process_constraint (new_constraint (temp, rhs));
	    }
	}
      else
	{
	  varinfo_t arg_vi = get_vi_for_tree (t);

	  /* No noalias guarantee: the argument (and each of its
	     fields) may point anywhere.  */
	  for (p = arg_vi; p; p = p->next)
	    make_constraint_from_anything (p);
	}
    }
}
4239
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to set itself.  */
  bitmap pt_vars;
  /* Cached result of bitmap_hash (pt_vars), used as the hashtable
     key.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;

/* Hashtable of shared_bitmap_info entries, keyed by bitmap hash.  */
static htab_t shared_bitmap_table;
4250
4251 /* Hash function for a shared_bitmap_info_t */
4252
4253 static hashval_t
4254 shared_bitmap_hash (const void *p)
4255 {
4256 const shared_bitmap_info_t bi = (shared_bitmap_info_t) p;
4257 return bi->hashcode;
4258 }
4259
4260 /* Equality function for two shared_bitmap_info_t's. */
4261
4262 static int
4263 shared_bitmap_eq (const void *p1, const void *p2)
4264 {
4265 const shared_bitmap_info_t sbi1 = (shared_bitmap_info_t) p1;
4266 const shared_bitmap_info_t sbi2 = (shared_bitmap_info_t) p2;
4267 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4268 }
4269
4270 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4271 existing instance if there is one, NULL otherwise. */
4272
4273 static bitmap
4274 shared_bitmap_lookup (bitmap pt_vars)
4275 {
4276 void **slot;
4277 struct shared_bitmap_info sbi;
4278
4279 sbi.pt_vars = pt_vars;
4280 sbi.hashcode = bitmap_hash (pt_vars);
4281
4282 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4283 sbi.hashcode, NO_INSERT);
4284 if (!slot)
4285 return NULL;
4286 else
4287 return ((shared_bitmap_info_t) *slot)->pt_vars;
4288 }
4289
4290
4291 /* Add a bitmap to the shared bitmap hashtable. */
4292
4293 static void
4294 shared_bitmap_add (bitmap pt_vars)
4295 {
4296 void **slot;
4297 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4298
4299 sbi->pt_vars = pt_vars;
4300 sbi->hashcode = bitmap_hash (pt_vars);
4301
4302 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4303 sbi->hashcode, INSERT);
4304 gcc_assert (!*slot);
4305 *slot = (void *) sbi;
4306 }
4307
4308
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, which came from variable PTR.
   For variables that are actually dereferenced, we also use type
   based alias analysis to prune the points-to sets.
   IS_DEREFED is true if PTR was directly dereferenced, which we use to
   help determine whether we are allowed to prune using TBAA.
   If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
   the from set.  */

static void
set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
		   bool no_tbaa_pruning)
{
  unsigned int i;
  bitmap_iterator bi;
  subvar_t sv;
  HOST_WIDE_INT ptr_alias_set = get_alias_set (TREE_TYPE (ptr));

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);
      unsigned HOST_WIDE_INT var_alias_set;

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
	{
	  /* Variables containing unions may need to be converted to
	     their SFT's, because SFT's can have unions and we cannot.  */
	  for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
	    bitmap_set_bit (into, DECL_UID (sv->var));
	}
      else if (TREE_CODE (vi->decl) == VAR_DECL
	       || TREE_CODE (vi->decl) == PARM_DECL)
	{
	  if (var_can_have_subvars (vi->decl)
	      && get_subvars_for_var (vi->decl))
	    {
	      /* If VI->DECL is an aggregate for which we created
		 SFTs, add the SFT corresponding to VI->OFFSET.  */
	      tree sft = get_subvar_at (vi->decl, vi->offset);
	      if (sft)
		{
		  var_alias_set = get_alias_set (sft);
		  /* Keep the SFT unless TBAA pruning applies and the
		     alias sets provably don't conflict.  */
		  if (no_tbaa_pruning
		      || (!is_derefed && !vi->directly_dereferenced)
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    bitmap_set_bit (into, DECL_UID (sft));
		}
	    }
	  else
	    {
	      /* Otherwise, just add VI->DECL to the alias set.
		 Don't type prune artificial vars.  */
	      if (vi->is_artificial_var)
		bitmap_set_bit (into, DECL_UID (vi->decl));
	      else
		{
		  var_alias_set = get_alias_set (vi->decl);
		  if (no_tbaa_pruning
		      || (!is_derefed && !vi->directly_dereferenced)
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    bitmap_set_bit (into, DECL_UID (vi->decl));
		}
	    }
	}
    }
}
4380
4381
/* True between compute_points_to_sets and delete_points_to_sets,
   i.e. while the points-to information in this file is valid.  */
static bool have_alias_info = false;

/* The list of SMT's that are in use by our pointer variables.  This
   is the set of SMT's for all pointers that can point to anything.  */
static bitmap used_smts;

/* Due to the ordering of points-to set calculation and SMT
   calculation being a bit co-dependent, we can't just calculate SMT
   used info whenever we want, we have to calculate it around the time
   that find_what_p_points_to is called.  */
4392
/* Mark which SMT's are in use by points-to anything variables.
   Records, in USED_SMTS, the DECL_UID of the symbol memory tag of
   every pointer variable whose solution contains ANYTHING.  */

void
set_used_smts (void)
{
  int i;
  varinfo_t vi;
  used_smts = BITMAP_ALLOC (&pta_obstack);

  for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
    {
      tree var = vi->decl;
      tree smt;
      var_ann_t va;
      struct ptr_info_def *pi = NULL;

      /* For parm decls, the pointer info may be under the default
	 def.  */
      if (TREE_CODE (vi->decl) == PARM_DECL
	  && gimple_default_def (cfun, var))
	pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
      else if (TREE_CODE (var) == SSA_NAME)
	pi = SSA_NAME_PTR_INFO (var);

      /* Skip the special variables and those without their own
	 solution set, plus anything that is not a dereferenced,
	 possibly-aliased pointer variable.  */
      if (vi->is_special_var || find (vi->id) != vi->id
	  || !SSA_VAR_P (var)
	  || (pi && !pi->is_dereferenced)
	  || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
	  || !POINTER_TYPE_P (TREE_TYPE (var)))
	continue;

      /* The SMT lives on the underlying variable, not the SSA name.  */
      if (TREE_CODE (var) == SSA_NAME)
	var = SSA_NAME_VAR (var);

      va = var_ann (var);
      if (!va)
	continue;

      /* Record the SMT only for pointers that can point to anything;
	 those are the ones merge_smts_into has to fall back on.  */
      smt = va->symbol_mem_tag;
      if (smt && bitmap_bit_p (vi->solution, anything_id))
	bitmap_set_bit (used_smts, DECL_UID (smt));
    }
}
4438
/* Merge the necessary SMT's into the bitmap SOLUTION, which is
   P's points-to set.  This involves merging all SMT's that are a
   subset of the SMT necessary for P, as well as the aliases of P's
   own SMT.  */

static void
merge_smts_into (tree p, bitmap solution)
{
  unsigned int i;
  bitmap_iterator bi;
  tree smt;
  bitmap aliases;
  tree var = p;

  /* The SMT hangs off the underlying variable, not the SSA name.  */
  if (TREE_CODE (p) == SSA_NAME)
    var = SSA_NAME_VAR (p);

  smt = var_ann (var)->symbol_mem_tag;
  if (smt)
    {
      HOST_WIDE_INT smtset = get_alias_set (TREE_TYPE (smt));

      /* Need to set the SMT subsets first before this
	 will work properly.  */
      bitmap_set_bit (solution, DECL_UID (smt));
      /* Pull in every in-use SMT whose alias set is a subset of ours
	 (USED_SMTS was populated by set_used_smts).  */
      EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
	{
	  tree newsmt = referenced_var (i);
	  tree newsmttype = TREE_TYPE (newsmt);

	  if (alias_set_subset_of (get_alias_set (newsmttype),
				   smtset))
	    bitmap_set_bit (solution, i);
	}

      /* Finally merge in everything our own SMT may alias.  */
      aliases = MTAG_ALIASES (smt);
      if (aliases)
	bitmap_ior_into (solution, aliases);
    }
}
4478
/* Given a pointer variable P, fill in its points-to set, or return
   false if we can't.
   Rather than return false for variables that point-to anything, we
   instead find the corresponding SMT, and merge in its aliases.  In
   addition to these aliases, we also set the bits for the SMT's
   themselves and their subsets, as SMT's are still in use by
   non-SSA_NAME's, and pruning may eliminate every one of their
   aliases.  In such a case, if we did not include the right set of
   SMT's in the points-to set of the variable, we'd end up with
   statements that do not conflict but should.  */

bool
find_what_p_points_to (tree p)
{
  tree lookup_p = p;
  varinfo_t vi;

  /* Points-to information is only available after
     compute_points_to_sets has run.  */
  if (!have_alias_info)
    return false;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (p))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (vi)
    {
      if (vi->is_artificial_var)
	return false;

      /* See if this is a field or a structure.  */
      if (vi->size != vi->fullsize)
	{
	  /* Nothing currently asks about structure fields directly,
	     but when they do, we need code here to hand back the
	     points-to set.  */
	  if (!var_can_have_subvars (vi->decl)
	      || get_subvars_for_var (vi->decl) == NULL)
	    return false;
	}
      else
	{
	  struct ptr_info_def *pi = get_ptr_info (p);
	  unsigned int i;
	  bitmap_iterator bi;
	  bool was_pt_anything = false;
	  bitmap finished_solution;
	  bitmap result;

	  /* Pointers that are never dereferenced need no points-to
	     set.  */
	  if (!pi->is_dereferenced)
	    return false;

	  /* This variable may have been collapsed, let's get the real
	     variable.  */
	  vi = get_varinfo (find (vi->id));

	  /* Translate artificial variables into SSA_NAME_PTR_INFO
	     attributes.  */
	  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	    {
	      varinfo_t vi = get_varinfo (i);

	      if (vi->is_artificial_var)
		{
		  /* FIXME.  READONLY should be handled better so that
		     flow insensitive aliasing can disregard writable
		     aliases.  */
		  if (vi->id == nothing_id)
		    pi->pt_null = 1;
		  else if (vi->id == anything_id)
		    was_pt_anything = 1;
		  else if (vi->id == readonly_id)
		    was_pt_anything = 1;
		  else if (vi->id == integer_id)
		    was_pt_anything = 1;
		  else if (vi->is_heap_var)
		    pi->pt_global_mem = 1;
		}
	    }

	  /* Share the final set of variables when possible.  */

	  finished_solution = BITMAP_GGC_ALLOC ();
	  stats.points_to_sets_created++;

	  /* Instead of using pt_anything, we merge in the SMT aliases
	     for the underlying SMT.  In addition, if they could have
	     pointed to anything, they could point to global memory.
	     But we cannot do that for ref-all pointers because these
	     aliases have not been computed yet.  */
	  if (was_pt_anything)
	    {
	      if (PTR_IS_REF_ALL (p))
		{
		  pi->pt_anything = 1;
		  return false;
		}

	      merge_smts_into (p, finished_solution);
	      pi->pt_global_mem = 1;
	    }

	  set_uids_in_ptset (vi->decl, finished_solution, vi->solution,
			     vi->directly_dereferenced,
			     vi->no_tbaa_pruning);
	  /* Intern the computed set: if an identical bitmap already
	     exists in the shared table, reuse it and discard ours.  */
	  result = shared_bitmap_lookup (finished_solution);

	  if (!result)
	    {
	      shared_bitmap_add (finished_solution);
	      pi->pt_vars = finished_solution;
	    }
	  else
	    {
	      pi->pt_vars = result;
	      bitmap_clear (finished_solution);
	    }

	  if (bitmap_empty_p (pi->pt_vars))
	    pi->pt_vars = NULL;

	  return true;
	}
    }

  return false;
}
4609
4610
4611
4612 /* Dump points-to information to OUTFILE. */
4613
4614 void
4615 dump_sa_points_to_info (FILE *outfile)
4616 {
4617 unsigned int i;
4618
4619 fprintf (outfile, "\nPoints-to sets\n\n");
4620
4621 if (dump_flags & TDF_STATS)
4622 {
4623 fprintf (outfile, "Stats:\n");
4624 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4625 fprintf (outfile, "Non-pointer vars: %d\n",
4626 stats.nonpointer_vars);
4627 fprintf (outfile, "Statically unified vars: %d\n",
4628 stats.unified_vars_static);
4629 fprintf (outfile, "Dynamically unified vars: %d\n",
4630 stats.unified_vars_dynamic);
4631 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4632 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4633 fprintf (outfile, "Number of implicit edges: %d\n",
4634 stats.num_implicit_edges);
4635 }
4636
4637 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4638 dump_solution_for_var (outfile, i);
4639 }
4640
4641
/* Convenience wrapper for use from the debugger: dump points-to
   information to stderr.  */

void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
4649
4650
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, and INTEGER.  These occupy the fixed variable
   ids 0 through 3 and must be created before any other varinfo.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
  var_nothing = new_var_info (nothing_tree, 0, "NULL");
  insert_vi_for_tree (nothing_tree, var_nothing);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  nothing_id = 0;
  VEC_safe_push (varinfo_t, heap, varmap, var_nothing);

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
  var_anything = new_var_info (anything_tree, 1, "ANYTHING");
  insert_vi_for_tree (anything_tree, var_anything);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;
  anything_id = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  VEC_safe_push (varinfo_t, heap, varmap, var_anything);
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
  var_readonly = new_var_info (readonly_tree, 2, "READONLY");
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;
  insert_vi_for_tree (readonly_tree, var_readonly);
  readonly_id = 2;
  VEC_safe_push (varinfo_t, heap, varmap, var_readonly);

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));

  /* Create the INTEGER variable, used to represent that a variable points
     to an INTEGER.  */
  integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
  var_integer = new_var_info (integer_tree, 3, "INTEGER");
  insert_vi_for_tree (integer_tree, var_integer);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;
  integer_id = 3;
  VEC_safe_push (varinfo_t, heap, varmap, var_integer);

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
4752
/* Initialize things necessary to perform PTA: bitmap obstacks,
   allocation pools, the constraint and varinfo vectors, the shared
   bitmap table, and the base constraint variables.  */

static void
init_alias_vars (void)
{
  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  /* PREDBITMAP_OBSTACK is released again by
     remove_preds_and_fake_succs once the predecessor graph is no
     longer needed.  */
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  vi_for_tree = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  /* Table entries own their shared_bitmap_info structs, hence FREE as
     the element deletion function.  */
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();
}
4775
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  Called once variable substitution is done; the
   solver only needs the real successor edges from here on.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = VEC_length (varinfo_t, varmap);
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  /* All predecessor bitmaps lived on this obstack; release it in one
     go rather than freeing them individually.  */
  bitmap_obstack_release (&predbitmap_obstack);
}
4811
/* Compute the set of variables we can't TBAA prune.  Starting from
   the nodes initially marked no_tbaa_pruning, propagate the flag
   forward through copy constraints and successor edges to a fixed
   point, then mark the affected pointer decls for the RTL layer.  */

static void
compute_tbaa_pruning (void)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;
  bool any;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial no_tbaa_pruning nodes as changed.  */
  any = false;
  for (i = 0; i < size; ++i)
    {
      varinfo_t ivi = get_varinfo (i);

      /* Only representatives with something to propagate (outgoing
	 edges or complex constraints) go on the worklist.  */
      if (find (i) == i && ivi->no_tbaa_pruning)
	{
	  any = true;
	  if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0)
	    {
	      SET_BIT (changed, i);
	      ++changed_count;
	    }
	}
    }

  /* Iterate in topological order until no node changes.  */
  while (changed_count > 0)
    {
      struct topo_info *ti = init_topo_info ();
      ++stats.iterations;

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{
	  bitmap_iterator bi;

	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the complex
	     constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      VEC(constraint_t,heap) *complex = graph->complex[i];

	      RESET_BIT (changed, i);
	      --changed_count;

	      /* Process the complex copy constraints.  */
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
		{
		  if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
		    {
		      varinfo_t lhsvi = get_varinfo (find (c->lhs.var));

		      if (!lhsvi->no_tbaa_pruning)
			{
			  lhsvi->no_tbaa_pruning = true;
			  if (!TEST_BIT (changed, lhsvi->id))
			    {
			      SET_BIT (changed, lhsvi->id);
			      ++changed_count;
			    }
			}
		    }
		}

	      /* Propagate to all successors.  */
	      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
		{
		  unsigned int to = find (j);
		  varinfo_t tovi = get_varinfo (to);

		  /* Don't propagate to ourselves.  */
		  if (to == i)
		    continue;

		  if (!tovi->no_tbaa_pruning)
		    {
		      tovi->no_tbaa_pruning = true;
		      if (!TEST_BIT (changed, to))
			{
			  SET_BIT (changed, to);
			  ++changed_count;
			}
		    }
		}
	    }
	}

      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  sbitmap_free (changed);

  /* If any node is unprunable, flag the pointer decls so later
     phases know TBAA must not be applied to them.  */
  if (any)
    {
      for (i = 0; i < size; ++i)
	{
	  varinfo_t ivi = get_varinfo (i);
	  varinfo_t ivip = get_varinfo (find (i));

	  if (ivip->no_tbaa_pruning)
	    {
	      tree var = ivi->decl;

	      if (TREE_CODE (var) == SSA_NAME)
		var = SSA_NAME_VAR (var);

	      if (POINTER_TYPE_P (TREE_TYPE (var)))
		{
		  DECL_NO_TBAA_P (var) = 1;

		  /* Tell the RTL layer that this pointer can alias
		     anything.  */
		  DECL_POINTER_ALIAS_SET (var) = 0;
		}
	    }
	}
    }
}
4947
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  AI carries
   the alias information being built by the caller; each statement's
   derived attributes are folded into it via update_alias_info.  */

void
compute_points_to_sets (struct alias_info *ai)
{
  struct scc_info *si;
  basic_block bb;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();
  init_alias_heapvars ();

  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  if (is_gimple_reg (PHI_RESULT (phi)))
	    {
	      find_func_aliases (phi);

	      /* Update various related attributes like escaped
		 addresses, pointer dereferences for loads and stores.
		 This is used when creating name tags and alias
		 sets.  */
	      update_alias_info (phi, ai);
	    }
	}

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  tree stmt = bsi_stmt (bsi);

	  find_func_aliases (stmt);

	  /* Update various related attributes like escaped
	     addresses, pointer dereferences for loads and stores.
	     This is used when creating name tags and alias
	     sets.  */
	  update_alias_info (stmt, ai);

	  /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
	     been captured, and we can remove them.  Note that the
	     iterator is only advanced when no removal happens.  */
	  if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
	    bsi_remove (&bsi, true);
	  else
	    bsi_next (&bsi);
	}
    }


  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution:\n");
  /* Build the constraint graph, unify trivially-equivalent variables,
     and move complex constraints onto their representatives.  */
  build_pred_graph ();
  si = perform_var_substitution (graph);
  move_complex_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "\nSolving graph:\n");

  solve_graph (graph);

  compute_tbaa_pruning ();

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  have_alias_info = true;

  timevar_pop (TV_TREE_PTA);
}
5042
5043
/* Delete created points-to sets.  Frees the shared bitmap table, the
   constraint graph, the varmap, and the allocation pools, and marks
   the alias information as no longer valid.  */

void
delete_points_to_sets (void)
{
  varinfo_t v;
  int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  bitmap_obstack_release (&pta_obstack);
  VEC_free (constraint_t, heap, constraints);

  /* Free the per-node complex constraint vectors, then the graph
     itself.  */
  for (i = 0; VEC_iterate (varinfo_t, varmap, i, v); i++)
    VEC_free (constraint_t, heap, graph->complex[i]);
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->indirect_cycles);
  free (graph);

  VEC_free (varinfo_t, heap, varmap);
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);
  have_alias_info = false;
}
5075
5076 /* Return true if we should execute IPA PTA. */
5077 static bool
5078 gate_ipa_pta (void)
5079 {
5080 return (flag_unit_at_a_time != 0
5081 && flag_ipa_pta
5082 /* Don't bother doing anything if the program has errors. */
5083 && !(errorcount || sorrycount));
5084 }
5085
/* Execute the driver for IPA PTA.  Creates varinfos for every
   function in the callgraph, generates constraints from the bodies of
   all analyzed master clones, then runs the same substitution/solve
   pipeline as the intraprocedural pass.  Always returns 0.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct scc_info *si;

  in_ipa_mode = 1;
  init_alias_heapvars ();
  init_alias_vars ();

  /* First pass: create function infos.  NOTE(review): the condition
     also matches unanalyzed nodes (no body available) — such
     functions only get constrained below if externally visible;
     confirm this is the intended filter.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (!node->analyzed || cgraph_is_master_clone (node))
	{
	  unsigned int varid;

	  varid = create_function_info_for (node->decl,
					    cgraph_node_name (node));
	  /* Externally visible functions can be called with anything,
	     so make every part of their info point to anything.  */
	  if (node->local.externally_visible)
	    {
	      varinfo_t fi = get_varinfo (varid);
	      for (; fi; fi = fi->next)
		make_constraint_from_anything (fi);
	    }
	}
    }
  /* Second pass: generate constraints from the body of each analyzed
     master clone.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && cgraph_is_master_clone (node))
	{
	  struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
	  basic_block bb;
	  tree old_func_decl = current_function_decl;
	  if (dump_file)
	    fprintf (dump_file,
		     "Generating constraints for %s\n",
		     cgraph_node_name (node));
	  /* Temporarily switch context to this function so statement
	     walkers see the right cfun.  */
	  push_cfun (cfun);
	  current_function_decl = node->decl;

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      block_stmt_iterator bsi;
	      tree phi;

	      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
		{
		  if (is_gimple_reg (PHI_RESULT (phi)))
		    {
		      find_func_aliases (phi);
		    }
		}

	      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		{
		  tree stmt = bsi_stmt (bsi);
		  find_func_aliases (stmt);
		}
	    }
	  current_function_decl = old_func_decl;
	  pop_cfun ();
	}
      else
	{
	  /* Make point to anything.  */
	}
    }



  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution:\n");

  build_pred_graph ();
  si = perform_var_substitution (graph);
  move_complex_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "\nSolving graph:\n");

  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  in_ipa_mode = 0;
  delete_alias_heapvars ();
  delete_points_to_sets ();
  return 0;
}
5193
/* Pass descriptor for the interprocedural points-to analysis pass.  */
struct tree_opt_pass pass_ipa_pta =
{
  "pta",				/* name */
  gate_ipa_pta,				/* gate */
  ipa_pta_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PTA,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
5210
5211 /* Initialize the heapvar for statement mapping. */
5212 void
5213 init_alias_heapvars (void)
5214 {
5215 if (!heapvar_for_stmt)
5216 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5217 NULL);
5218 }
5219
5220 void
5221 delete_alias_heapvars (void)
5222 {
5223 htab_delete (heapvar_for_stmt);
5224 heapvar_for_stmt = NULL;
5225 }
5226
5227
5228 #include "gt-tree-ssa-structalias.h"