1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "basic-block.h"
33 #include "gimple-ssa.h"
35 #include "tree-ssanames.h"
36 #include "tree-into-ssa.h"
38 #include "tree-inline.h"
39 #include "diagnostic-core.h"
40 #include "hash-table.h"
42 #include "tree-pass.h"
43 #include "alloc-pool.h"
44 #include "splay-tree.h"
47 #include "pointer-set.h"
49 /* The idea behind this analyzer is to generate set constraints from the
50 program, then solve the resulting constraints in order to generate the
53 Set constraints are a way of modeling program analysis problems that
54 involve sets. They consist of an inclusion constraint language,
55 describing the variables (each variable is a set) and operations that
56 are involved on the variables, and a set of rules that derive facts
57 from these operations. To solve a system of set constraints, you derive
58 all possible facts under the rules, which gives you the correct sets
61 See "Efficient Field-sensitive pointer analysis for C" by "David
62 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
63 http://citeseer.ist.psu.edu/pearce04efficient.html
65 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
66 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
67 http://citeseer.ist.psu.edu/heintze01ultrafast.html
69 There are three types of real constraint expressions, DEREF,
70 ADDRESSOF, and SCALAR. Each constraint expression consists
71 of a constraint type, a variable, and an offset.
73 SCALAR is a constraint expression type used to represent x, whether
74 it appears on the LHS or the RHS of a statement.
75 DEREF is a constraint expression type used to represent *x, whether
76 it appears on the LHS or the RHS of a statement.
77 ADDRESSOF is a constraint expression used to represent &x, whether
78 it appears on the LHS or the RHS of a statement.
80 Each pointer variable in the program is assigned an integer id, and
81 each field of a structure variable is assigned an integer id as well.
83 Structure variables are linked to their list of fields through a "next
84 field" in each variable that points to the next field in offset
86 Each variable for a structure field has
88 1. "size", that tells the size in bits of that field.
89 2. "fullsize", that tells the size in bits of the entire structure.
90 3. "offset", that tells the offset in bits from the beginning of the
91 structure to this field.
103 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
104 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
105 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
108 In order to solve the system of set constraints, the following is
111 1. Each constraint variable x has a solution set associated with it,
114 2. Constraints are separated into direct, copy, and complex.
115 Direct constraints are ADDRESSOF constraints that require no extra
116 processing, such as P = &Q
117 Copy constraints are those of the form P = Q.
118 Complex constraints are all the constraints involving dereferences
119 and offsets (including offsetted copies).
121 3. All direct constraints of the form P = &Q are processed, such
122 that Q is added to Sol(P)
124 4. All complex constraints for a given constraint variable are stored in a
125 linked list attached to that variable's node.
127 5. A directed graph is built out of the copy constraints. Each
128 constraint variable is a node in the graph, and an edge from
129 Q to P is added for each copy constraint of the form P = Q
131 6. The graph is then walked, and solution sets are
132 propagated along the copy edges, such that an edge from Q to P
133 causes Sol(P) <- Sol(P) union Sol(Q).
135 7. As we visit each node, all complex constraints associated with
136 that node are processed by adding appropriate copy edges to the graph, or the
137 appropriate variables to the solution set.
139 8. The process of walking the graph is iterated until no solution
142 Prior to walking the graph in steps 6 and 7, We perform static
143 cycle elimination on the constraint graph, as well
144 as off-line variable substitution.
146 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
147 on and turned into anything), but isn't. You can just see what offset
148 inside the pointed-to struct it's going to access.
150 TODO: Constant bounded arrays can be handled as if they were structs of the
151 same number of elements.
153 TODO: Modeling heap and incoming pointers becomes much better if we
154 add fields to them as we discover them, which we could do.
156 TODO: We could handle unions, but to be honest, it's probably not
157 worth the pain or slowdown. */
159 /* IPA-PTA optimizations possible.
161 When the indirect function called is ANYTHING we can add disambiguation
162 based on the function signatures (or simply the parameter count which
163 is the varinfo size). We also do not need to consider functions that
164 do not have their address taken.
166 The is_global_var bit which marks escape points is overly conservative
167 in IPA mode. Split it to is_escape_point and is_global_var - only
168 externally visible globals are escape points in IPA mode. This is
169 also needed to fix the pt_solution_includes_global predicate
170 (and thus ptr_deref_may_alias_global_p).
172 The way we introduce DECL_PT_UID to avoid fixing up all points-to
173 sets in the translation unit when we copy a DECL during inlining
174 pessimizes precision. The advantage is that the DECL_PT_UID keeps
175 compile-time and memory usage overhead low - the points-to sets
176 do not grow or get unshared as they would during a fixup phase.
177 An alternative solution is to delay IPA PTA until after all
178 inlining transformations have been applied.
180 The way we propagate clobber/use information isn't optimized.
181 It should use a new complex constraint that properly filters
182 out local variables of the callee (though that would make
183 the sets invalid after inlining). OTOH we might as well
184 admit defeat to WHOPR and simply do all the clobber/use analysis
185 and propagation after PTA finished but before we threw away
186 points-to information for memory variables. WHOPR and PTA
187 do not play along well anyway - the whole constraint solving
188 would need to be done in WPA phase and it will be very interesting
189 to apply the results to local SSA names during LTRANS phase.
191 We probably should compute a per-function unit-ESCAPE solution
192 propagating it simply like the clobber / uses solutions. The
193 solution can go alongside the non-IPA escaped solution and be
194 used to query which vars escape the unit through a function.
196 We never put function decls in points-to sets so we do not
197 keep the set of called functions for indirect calls.
199 And probably more. */
/* True if we should do field-sensitive points-to analysis, giving each
   structure field its own constraint variable.  */
static bool use_field_sensitive = true;

/* Nonzero when computing points-to information interprocedurally (IPA
   mode) rather than per function.  */
static int in_ipa_mode = 0;
204 /* Used for predecessor bitmaps. */
205 static bitmap_obstack predbitmap_obstack
;
207 /* Used for points-to sets. */
208 static bitmap_obstack pta_obstack
;
210 /* Used for oldsolution members of variables. */
211 static bitmap_obstack oldpta_obstack
;
213 /* Used for per-solver-iteration bitmaps. */
214 static bitmap_obstack iteration_obstack
;
216 static unsigned int create_variable_info_for (tree
, const char *);
217 typedef struct constraint_graph
*constraint_graph_t
;
218 static void unify_nodes (constraint_graph_t
, unsigned int, unsigned int, bool);
221 typedef struct constraint
*constraint_t
;
/* Iterate over the bits of bitmap A, but only if A is non-NULL
   (an unallocated per-node bitmap means "empty set" here).  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Statistics gathered while building and solving the constraint system.  */
static struct constraint_stats
{
  /* Number of constraint variables created.  */
  unsigned int total_vars;
  /* Number of variables known not to contain pointers.  */
  unsigned int nonpointer_vars;
  /* Nodes unified before solving (offline optimizations).  */
  unsigned int unified_vars_static;
  /* Nodes unified during solving (online cycle detection).  */
  unsigned int unified_vars_dynamic;
  /* Solver iterations until a fixed point was reached.  */
  unsigned int iterations;
  /* Explicit copy edges added to the constraint graph.  */
  unsigned int num_edges;
  /* Implicit edges added for offline processing.  */
  unsigned int num_implicit_edges;
  /* Final points-to sets materialized.  */
  unsigned int points_to_sets_created;
} stats;
242 /* ID of this variable */
245 /* True if this is a variable created by the constraint analysis, such as
246 heap variables and constraints we had to break up. */
247 unsigned int is_artificial_var
: 1;
249 /* True if this is a special variable whose solution set should not be
251 unsigned int is_special_var
: 1;
253 /* True for variables whose size is not known or variable. */
254 unsigned int is_unknown_size_var
: 1;
256 /* True for (sub-)fields that represent a whole variable. */
257 unsigned int is_full_var
: 1;
259 /* True if this is a heap variable. */
260 unsigned int is_heap_var
: 1;
262 /* True if this field may contain pointers. */
263 unsigned int may_have_pointers
: 1;
265 /* True if this field has only restrict qualified pointers. */
266 unsigned int only_restrict_pointers
: 1;
268 /* True if this represents a global variable. */
269 unsigned int is_global_var
: 1;
271 /* True if this represents a IPA function info. */
272 unsigned int is_fn_info
: 1;
274 /* The ID of the variable for the next field in this structure
275 or zero for the last field in this structure. */
278 /* The ID of the variable for the first field in this structure. */
281 /* Offset of this variable, in bits, from the base variable */
282 unsigned HOST_WIDE_INT offset
;
284 /* Size of the variable, in bits. */
285 unsigned HOST_WIDE_INT size
;
287 /* Full size of the base variable, in bits. */
288 unsigned HOST_WIDE_INT fullsize
;
290 /* Name of this variable */
293 /* Tree that this variable is associated with. */
296 /* Points-to set for this variable. */
299 /* Old points-to set for this variable. */
302 typedef struct variable_info
*varinfo_t
;
304 static varinfo_t
first_vi_for_offset (varinfo_t
, unsigned HOST_WIDE_INT
);
305 static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t
,
306 unsigned HOST_WIDE_INT
);
307 static varinfo_t
lookup_vi_for_tree (tree
);
308 static inline bool type_can_have_subvars (const_tree
);
310 /* Pool of variable info structures. */
311 static alloc_pool variable_info_pool
;
313 /* Map varinfo to final pt_solution. */
314 static pointer_map_t
*final_solutions
;
315 struct obstack final_solutions_obstack
;
317 /* Table of variable info structures for constraint variables.
318 Indexed directly by variable info id. */
319 static vec
<varinfo_t
> varmap
;
321 /* Return the varmap element N */
323 static inline varinfo_t
324 get_varinfo (unsigned int n
)
329 /* Return the next variable in the list of sub-variables of VI
330 or NULL if VI is the last sub-variable. */
332 static inline varinfo_t
333 vi_next (varinfo_t vi
)
335 return get_varinfo (vi
->next
);
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
344 /* Return a new variable info structure consisting for a variable
345 named NAME, and using constraint graph node NODE. Append it
346 to the vector of variable info structures. */
349 new_var_info (tree t
, const char *name
)
351 unsigned index
= varmap
.length ();
352 varinfo_t ret
= (varinfo_t
) pool_alloc (variable_info_pool
);
357 /* Vars without decl are artificial and do not have sub-variables. */
358 ret
->is_artificial_var
= (t
== NULL_TREE
);
359 ret
->is_special_var
= false;
360 ret
->is_unknown_size_var
= false;
361 ret
->is_full_var
= (t
== NULL_TREE
);
362 ret
->is_heap_var
= false;
363 ret
->may_have_pointers
= true;
364 ret
->only_restrict_pointers
= false;
365 ret
->is_global_var
= (t
== NULL_TREE
);
366 ret
->is_fn_info
= false;
368 ret
->is_global_var
= (is_global_var (t
)
369 /* We have to treat even local register variables
371 || (TREE_CODE (t
) == VAR_DECL
372 && DECL_HARD_REGISTER (t
)));
373 ret
->solution
= BITMAP_ALLOC (&pta_obstack
);
374 ret
->oldsolution
= NULL
;
380 varmap
.safe_push (ret
);
386 /* A map mapping call statements to per-stmt variables for uses
387 and clobbers specific to the call. */
388 static struct pointer_map_t
*call_stmt_vars
;
390 /* Lookup or create the variable for the call statement CALL. */
393 get_call_vi (gimple call
)
398 slot_p
= pointer_map_insert (call_stmt_vars
, call
);
400 return (varinfo_t
) *slot_p
;
402 vi
= new_var_info (NULL_TREE
, "CALLUSED");
406 vi
->is_full_var
= true;
408 vi2
= new_var_info (NULL_TREE
, "CALLCLOBBERED");
412 vi2
->is_full_var
= true;
416 *slot_p
= (void *) vi
;
420 /* Lookup the variable for the call statement CALL representing
421 the uses. Returns NULL if there is nothing special about this call. */
424 lookup_call_use_vi (gimple call
)
428 slot_p
= pointer_map_contains (call_stmt_vars
, call
);
430 return (varinfo_t
) *slot_p
;
435 /* Lookup the variable for the call statement CALL representing
436 the clobbers. Returns NULL if there is nothing special about this call. */
439 lookup_call_clobber_vi (gimple call
)
441 varinfo_t uses
= lookup_call_use_vi (call
);
445 return vi_next (uses
);
448 /* Lookup or create the variable for the call statement CALL representing
452 get_call_use_vi (gimple call
)
454 return get_call_vi (call
);
457 /* Lookup or create the variable for the call statement CALL representing
460 static varinfo_t ATTRIBUTE_UNUSED
461 get_call_clobber_vi (gimple call
)
463 return vi_next (get_call_vi (call
));
/* The three kinds of constraint expressions: x (SCALAR), *x (DEREF)
   and &x (ADDRESSOF).  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
469 /* An expression that appears in a constraint. */
471 struct constraint_expr
473 /* Constraint type. */
474 constraint_expr_type type
;
476 /* Variable we are referring to in the constraint. */
479 /* Offset, in bits, of this constraint from the beginning of
480 variables it ends up referring to.
482 IOW, in a deref constraint, we would deref, get the result set,
483 then add OFFSET to each member. */
484 HOST_WIDE_INT offset
;
487 /* Use 0x8000... as special unknown offset. */
488 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
490 typedef struct constraint_expr ce_s
;
491 static void get_constraint_for_1 (tree
, vec
<ce_s
> *, bool, bool);
492 static void get_constraint_for (tree
, vec
<ce_s
> *);
493 static void get_constraint_for_rhs (tree
, vec
<ce_s
> *);
494 static void do_deref (vec
<ce_s
> *);
496 /* Our set constraints are made up of two constraint expressions, one
499 As described in the introduction, our set constraints each represent an
500 operation between set valued variables.
504 struct constraint_expr lhs
;
505 struct constraint_expr rhs
;
508 /* List of constraints that we use to build the constraint graph from. */
510 static vec
<constraint_t
> constraints
;
511 static alloc_pool constraint_pool
;
513 /* The constraint graph is represented as an array of bitmaps
514 containing successor nodes. */
516 struct constraint_graph
518 /* Size of this graph, which may be different than the number of
519 nodes in the variable map. */
522 /* Explicit successors of each node. */
525 /* Implicit predecessors of each node (Used for variable
527 bitmap
*implicit_preds
;
529 /* Explicit predecessors of each node (Used for variable substitution). */
532 /* Indirect cycle representatives, or -1 if the node has no indirect
534 int *indirect_cycles
;
536 /* Representative node for a node. rep[a] == a unless the node has
540 /* Equivalence class representative for a label. This is used for
541 variable substitution. */
544 /* Pointer equivalence label for a node. All nodes with the same
545 pointer equivalence label can be unified together at some point
546 (either during constraint optimization or after the constraint
550 /* Pointer equivalence representative for a label. This is used to
551 handle nodes that are pointer equivalent but not location
552 equivalent. We can unite these once the addressof constraints
553 are transformed into initial points-to sets. */
556 /* Pointer equivalence label for each node, used during variable
558 unsigned int *pointer_label
;
560 /* Location equivalence label for each node, used during location
561 equivalence finding. */
562 unsigned int *loc_label
;
564 /* Pointed-by set for each node, used during location equivalence
565 finding. This is pointed-by rather than pointed-to, because it
566 is constructed using the predecessor graph. */
569 /* Points to sets for pointer equivalence. This is *not* the actual
570 points-to sets for nodes. */
573 /* Bitmap of nodes where the bit is set if the node is a direct
574 node. Used for variable substitution. */
575 sbitmap direct_nodes
;
577 /* Bitmap of nodes where the bit is set if the node is address
578 taken. Used for variable substitution. */
579 bitmap address_taken
;
581 /* Vector of complex constraints for each graph node. Complex
582 constraints are those involving dereferences or offsets that are
584 vec
<constraint_t
> *complex;
587 static constraint_graph_t graph
;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
596 /* Return the representative node for NODE, if NODE has been unioned
598 This function performs path compression along the way to finding
599 the representative. */
602 find (unsigned int node
)
604 gcc_checking_assert (node
< graph
->size
);
605 if (graph
->rep
[node
] != node
)
606 return graph
->rep
[node
] = find (graph
->rep
[node
]);
610 /* Union the TO and FROM nodes to the TO nodes.
611 Note that at some point in the future, we may want to do
612 union-by-rank, in which case we are going to have to return the
613 node we unified to. */
616 unite (unsigned int to
, unsigned int from
)
618 gcc_checking_assert (to
< graph
->size
&& from
< graph
->size
);
619 if (to
!= from
&& graph
->rep
[from
] != to
)
621 graph
->rep
[from
] = to
;
627 /* Create a new constraint consisting of LHS and RHS expressions. */
630 new_constraint (const struct constraint_expr lhs
,
631 const struct constraint_expr rhs
)
633 constraint_t ret
= (constraint_t
) pool_alloc (constraint_pool
);
639 /* Print out constraint C to FILE. */
642 dump_constraint (FILE *file
, constraint_t c
)
644 if (c
->lhs
.type
== ADDRESSOF
)
646 else if (c
->lhs
.type
== DEREF
)
648 fprintf (file
, "%s", get_varinfo (c
->lhs
.var
)->name
);
649 if (c
->lhs
.offset
== UNKNOWN_OFFSET
)
650 fprintf (file
, " + UNKNOWN");
651 else if (c
->lhs
.offset
!= 0)
652 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
653 fprintf (file
, " = ");
654 if (c
->rhs
.type
== ADDRESSOF
)
656 else if (c
->rhs
.type
== DEREF
)
658 fprintf (file
, "%s", get_varinfo (c
->rhs
.var
)->name
);
659 if (c
->rhs
.offset
== UNKNOWN_OFFSET
)
660 fprintf (file
, " + UNKNOWN");
661 else if (c
->rhs
.offset
!= 0)
662 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
666 void debug_constraint (constraint_t
);
667 void debug_constraints (void);
668 void debug_constraint_graph (void);
669 void debug_solution_for_var (unsigned int);
670 void debug_sa_points_to_info (void);
672 /* Print out constraint C to stderr. */
675 debug_constraint (constraint_t c
)
677 dump_constraint (stderr
, c
);
678 fprintf (stderr
, "\n");
681 /* Print out all constraints to FILE */
684 dump_constraints (FILE *file
, int from
)
688 for (i
= from
; constraints
.iterate (i
, &c
); i
++)
691 dump_constraint (file
, c
);
692 fprintf (file
, "\n");
696 /* Print out all constraints to stderr. */
699 debug_constraints (void)
701 dump_constraints (stderr
, 0);
704 /* Print the constraint graph in dot format. */
707 dump_constraint_graph (FILE *file
)
711 /* Only print the graph if it has already been initialized: */
715 /* Prints the header of the dot file: */
716 fprintf (file
, "strict digraph {\n");
717 fprintf (file
, " node [\n shape = box\n ]\n");
718 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
719 fprintf (file
, "\n // List of nodes and complex constraints in "
720 "the constraint graph:\n");
722 /* The next lines print the nodes in the graph together with the
723 complex constraints attached to them. */
724 for (i
= 1; i
< graph
->size
; i
++)
726 if (i
== FIRST_REF_NODE
)
730 if (i
< FIRST_REF_NODE
)
731 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
733 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
734 if (graph
->complex[i
].exists ())
738 fprintf (file
, " [label=\"\\N\\n");
739 for (j
= 0; graph
->complex[i
].iterate (j
, &c
); ++j
)
741 dump_constraint (file
, c
);
742 fprintf (file
, "\\l");
744 fprintf (file
, "\"]");
746 fprintf (file
, ";\n");
749 /* Go over the edges. */
750 fprintf (file
, "\n // Edges in the constraint graph:\n");
751 for (i
= 1; i
< graph
->size
; i
++)
757 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
], 0, j
, bi
)
759 unsigned to
= find (j
);
762 if (i
< FIRST_REF_NODE
)
763 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
765 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
766 fprintf (file
, " -> ");
767 if (to
< FIRST_REF_NODE
)
768 fprintf (file
, "\"%s\"", get_varinfo (to
)->name
);
770 fprintf (file
, "\"*%s\"", get_varinfo (to
- FIRST_REF_NODE
)->name
);
771 fprintf (file
, ";\n");
775 /* Prints the tail of the dot file. */
776 fprintf (file
, "}\n");
779 /* Print out the constraint graph to stderr. */
782 debug_constraint_graph (void)
784 dump_constraint_graph (stderr
);
789 The solver is a simple worklist solver, that works on the following
792 sbitmap changed_nodes = all zeroes;
794 For each node that is not already collapsed:
796 set bit in changed nodes
798 while (changed_count > 0)
800 compute topological ordering for constraint graph
802 find and collapse cycles in the constraint graph (updating
803 changed if necessary)
805 for each node (n) in the graph in topological order:
808 Process each complex constraint associated with the node,
809 updating changed if necessary.
811 For each outgoing edge from n, propagate the solution from n to
812 the destination of the edge, updating changed as necessary.
816 /* Return true if two constraint expressions A and B are equal. */
819 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
821 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
824 /* Return true if constraint expression A is less than constraint expression
825 B. This is just arbitrary, but consistent, in order to give them an
829 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
831 if (a
.type
== b
.type
)
834 return a
.offset
< b
.offset
;
836 return a
.var
< b
.var
;
839 return a
.type
< b
.type
;
842 /* Return true if constraint A is less than constraint B. This is just
843 arbitrary, but consistent, in order to give them an ordering. */
846 constraint_less (const constraint_t
&a
, const constraint_t
&b
)
848 if (constraint_expr_less (a
->lhs
, b
->lhs
))
850 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
853 return constraint_expr_less (a
->rhs
, b
->rhs
);
856 /* Return true if two constraints A and B are equal. */
859 constraint_equal (struct constraint a
, struct constraint b
)
861 return constraint_expr_equal (a
.lhs
, b
.lhs
)
862 && constraint_expr_equal (a
.rhs
, b
.rhs
);
866 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
869 constraint_vec_find (vec
<constraint_t
> vec
,
870 struct constraint lookfor
)
878 place
= vec
.lower_bound (&lookfor
, constraint_less
);
879 if (place
>= vec
.length ())
882 if (!constraint_equal (*found
, lookfor
))
887 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
890 constraint_set_union (vec
<constraint_t
> *to
,
891 vec
<constraint_t
> *from
)
896 FOR_EACH_VEC_ELT (*from
, i
, c
)
898 if (constraint_vec_find (*to
, *c
) == NULL
)
900 unsigned int place
= to
->lower_bound (c
, constraint_less
);
901 to
->safe_insert (place
, c
);
906 /* Expands the solution in SET to all sub-fields of variables included. */
909 solution_set_expand (bitmap set
)
914 /* In a first pass expand to the head of the variables we need to
915 add all sub-fields off. This avoids quadratic behavior. */
916 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
918 varinfo_t v
= get_varinfo (j
);
919 if (v
->is_artificial_var
922 bitmap_set_bit (set
, v
->head
);
925 /* In the second pass now expand all head variables with subfields. */
926 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
928 varinfo_t v
= get_varinfo (j
);
929 if (v
->is_artificial_var
933 for (v
= vi_next (v
); v
!= NULL
; v
= vi_next (v
))
934 bitmap_set_bit (set
, v
->id
);
938 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
942 set_union_with_increment (bitmap to
, bitmap from
, HOST_WIDE_INT inc
)
944 bool changed
= false;
948 /* If the solution of FROM contains anything it is good enough to transfer
950 if (bitmap_bit_p (from
, anything_id
))
951 return bitmap_set_bit (to
, anything_id
);
953 /* For zero offset simply union the solution into the destination. */
955 return bitmap_ior_into (to
, from
);
957 /* If the offset is unknown we have to expand the solution to
959 if (inc
== UNKNOWN_OFFSET
)
961 bitmap tmp
= BITMAP_ALLOC (&iteration_obstack
);
962 bitmap_copy (tmp
, from
);
963 solution_set_expand (tmp
);
964 changed
|= bitmap_ior_into (to
, tmp
);
969 /* For non-zero offset union the offsetted solution into the destination. */
970 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
972 varinfo_t vi
= get_varinfo (i
);
974 /* If this is a variable with just one field just set its bit
976 if (vi
->is_artificial_var
977 || vi
->is_unknown_size_var
979 changed
|= bitmap_set_bit (to
, i
);
982 unsigned HOST_WIDE_INT fieldoffset
= vi
->offset
+ inc
;
984 /* If the offset makes the pointer point to before the
985 variable use offset zero for the field lookup. */
987 && fieldoffset
> vi
->offset
)
990 vi
= first_or_preceding_vi_for_offset (vi
, fieldoffset
);
992 changed
|= bitmap_set_bit (to
, vi
->id
);
993 /* If the result is not exactly at fieldoffset include the next
994 field as well. See get_constraint_for_ptr_offset for more
996 if (vi
->offset
!= fieldoffset
998 changed
|= bitmap_set_bit (to
, vi
->next
);
1005 /* Insert constraint C into the list of complex constraints for graph
1009 insert_into_complex (constraint_graph_t graph
,
1010 unsigned int var
, constraint_t c
)
1012 vec
<constraint_t
> complex = graph
->complex[var
];
1013 unsigned int place
= complex.lower_bound (c
, constraint_less
);
1015 /* Only insert constraints that do not already exist. */
1016 if (place
>= complex.length ()
1017 || !constraint_equal (*c
, *complex[place
]))
1018 graph
->complex[var
].safe_insert (place
, c
);
1022 /* Condense two variable nodes into a single variable node, by moving
1023 all associated info from SRC to TO. */
1026 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
1032 gcc_checking_assert (find (from
) == to
);
1034 /* Move all complex constraints from src node into to node */
1035 FOR_EACH_VEC_ELT (graph
->complex[from
], i
, c
)
1037 /* In complex constraints for node src, we may have either
1038 a = *src, and *src = a, or an offseted constraint which are
1039 always added to the rhs node's constraints. */
1041 if (c
->rhs
.type
== DEREF
)
1043 else if (c
->lhs
.type
== DEREF
)
1048 constraint_set_union (&graph
->complex[to
], &graph
->complex[from
]);
1049 graph
->complex[from
].release ();
1053 /* Remove edges involving NODE from GRAPH. */
1056 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
1058 if (graph
->succs
[node
])
1059 BITMAP_FREE (graph
->succs
[node
]);
1062 /* Merge GRAPH nodes FROM and TO into node TO. */
1065 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
1068 if (graph
->indirect_cycles
[from
] != -1)
1070 /* If we have indirect cycles with the from node, and we have
1071 none on the to node, the to node has indirect cycles from the
1072 from node now that they are unified.
1073 If indirect cycles exist on both, unify the nodes that they
1074 are in a cycle with, since we know they are in a cycle with
1076 if (graph
->indirect_cycles
[to
] == -1)
1077 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
1080 /* Merge all the successor edges. */
1081 if (graph
->succs
[from
])
1083 if (!graph
->succs
[to
])
1084 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
1085 bitmap_ior_into (graph
->succs
[to
],
1086 graph
->succs
[from
]);
1089 clear_edges_for_node (graph
, from
);
1093 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1094 it doesn't exist in the graph already. */
1097 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
1103 if (!graph
->implicit_preds
[to
])
1104 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1106 if (bitmap_set_bit (graph
->implicit_preds
[to
], from
))
1107 stats
.num_implicit_edges
++;
1110 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1111 it doesn't exist in the graph already.
1112 Return false if the edge already existed, true otherwise. */
1115 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
1118 if (!graph
->preds
[to
])
1119 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1120 bitmap_set_bit (graph
->preds
[to
], from
);
1123 /* Add a graph edge to GRAPH, going from FROM to TO if
1124 it doesn't exist in the graph already.
1125 Return false if the edge already existed, true otherwise. */
1128 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
1139 if (!graph
->succs
[from
])
1140 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
1141 if (bitmap_set_bit (graph
->succs
[from
], to
))
1144 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
1152 /* Initialize the constraint graph structure to contain SIZE nodes. */
1155 init_graph (unsigned int size
)
1159 graph
= XCNEW (struct constraint_graph
);
1161 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
1162 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
1163 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
1164 /* ??? Macros do not support template types with multiple arguments,
1165 so we use a typedef to work around it. */
1166 typedef vec
<constraint_t
> vec_constraint_t_heap
;
1167 graph
->complex = XCNEWVEC (vec_constraint_t_heap
, size
);
1168 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
1169 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
1171 for (j
= 0; j
< graph
->size
; j
++)
1174 graph
->pe_rep
[j
] = -1;
1175 graph
->indirect_cycles
[j
] = -1;
1179 /* Build the constraint graph, adding only predecessor edges right now. */
1182 build_pred_graph (void)
1188 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
1189 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
1190 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
1191 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
1192 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
1193 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1194 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1195 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1196 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1197 bitmap_clear (graph
->direct_nodes
);
1199 for (j
= 1; j
< FIRST_REF_NODE
; j
++)
1201 if (!get_varinfo (j
)->is_special_var
)
1202 bitmap_set_bit (graph
->direct_nodes
, j
);
1205 for (j
= 0; j
< graph
->size
; j
++)
1206 graph
->eq_rep
[j
] = -1;
1208 for (j
= 0; j
< varmap
.length (); j
++)
1209 graph
->indirect_cycles
[j
] = -1;
1211 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1213 struct constraint_expr lhs
= c
->lhs
;
1214 struct constraint_expr rhs
= c
->rhs
;
1215 unsigned int lhsvar
= lhs
.var
;
1216 unsigned int rhsvar
= rhs
.var
;
1218 if (lhs
.type
== DEREF
)
1221 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1222 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1224 else if (rhs
.type
== DEREF
)
1227 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1228 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1230 bitmap_clear_bit (graph
->direct_nodes
, lhsvar
);
1232 else if (rhs
.type
== ADDRESSOF
)
1237 if (graph
->points_to
[lhsvar
] == NULL
)
1238 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1239 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1241 if (graph
->pointed_by
[rhsvar
] == NULL
)
1242 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1243 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1245 /* Implicitly, *x = y */
1246 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1248 /* All related variables are no longer direct nodes. */
1249 bitmap_clear_bit (graph
->direct_nodes
, rhsvar
);
1250 v
= get_varinfo (rhsvar
);
1251 if (!v
->is_full_var
)
1253 v
= get_varinfo (v
->head
);
1256 bitmap_clear_bit (graph
->direct_nodes
, v
->id
);
1261 bitmap_set_bit (graph
->address_taken
, rhsvar
);
1263 else if (lhsvar
> anything_id
1264 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1267 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1268 /* Implicitly, *x = *y */
1269 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1270 FIRST_REF_NODE
+ rhsvar
);
1272 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1274 if (rhs
.offset
!= 0)
1275 bitmap_clear_bit (graph
->direct_nodes
, lhs
.var
);
1276 else if (lhs
.offset
!= 0)
1277 bitmap_clear_bit (graph
->direct_nodes
, rhs
.var
);
1282 /* Build the constraint graph, adding successor edges. */
1285 build_succ_graph (void)
1290 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1292 struct constraint_expr lhs
;
1293 struct constraint_expr rhs
;
1294 unsigned int lhsvar
;
1295 unsigned int rhsvar
;
1302 lhsvar
= find (lhs
.var
);
1303 rhsvar
= find (rhs
.var
);
1305 if (lhs
.type
== DEREF
)
1307 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1308 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1310 else if (rhs
.type
== DEREF
)
1312 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1313 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1315 else if (rhs
.type
== ADDRESSOF
)
1318 gcc_checking_assert (find (rhs
.var
) == rhs
.var
);
1319 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
1321 else if (lhsvar
> anything_id
1322 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1324 add_graph_edge (graph
, lhsvar
, rhsvar
);
1328 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1329 receive pointers. */
1330 t
= find (storedanything_id
);
1331 for (i
= integer_id
+ 1; i
< FIRST_REF_NODE
; ++i
)
1333 if (!bitmap_bit_p (graph
->direct_nodes
, i
)
1334 && get_varinfo (i
)->may_have_pointers
)
1335 add_graph_edge (graph
, find (i
), t
);
1338 /* Everything stored to ANYTHING also potentially escapes. */
1339 add_graph_edge (graph
, find (escaped_id
), t
);
1343 /* Changed variables on the last iteration. */
1344 static bitmap changed
;
1346 /* Strongly Connected Component visitation info. */
1353 unsigned int *node_mapping
;
1355 vec
<unsigned> scc_stack
;
1359 /* Recursive routine to find strongly connected components in GRAPH.
1360 SI is the SCC info to store the information in, and N is the id of current
1361 graph node we are processing.
1363 This is Tarjan's strongly connected component finding algorithm, as
1364 modified by Nuutila to keep only non-root nodes on the stack.
1365 The algorithm can be found in "On finding the strongly connected
1366 connected components in a directed graph" by Esko Nuutila and Eljas
1367 Soisalon-Soininen, in Information Processing Letters volume 49,
1368 number 1, pages 9-14. */
1371 scc_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1375 unsigned int my_dfs
;
1377 bitmap_set_bit (si
->visited
, n
);
1378 si
->dfs
[n
] = si
->current_index
++;
1379 my_dfs
= si
->dfs
[n
];
1381 /* Visit all the successors. */
1382 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
1386 if (i
> LAST_REF_NODE
)
1390 if (bitmap_bit_p (si
->deleted
, w
))
1393 if (!bitmap_bit_p (si
->visited
, w
))
1394 scc_visit (graph
, si
, w
);
1396 unsigned int t
= find (w
);
1397 gcc_checking_assert (find (n
) == n
);
1398 if (si
->dfs
[t
] < si
->dfs
[n
])
1399 si
->dfs
[n
] = si
->dfs
[t
];
1402 /* See if any components have been identified. */
1403 if (si
->dfs
[n
] == my_dfs
)
1405 if (si
->scc_stack
.length () > 0
1406 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1408 bitmap scc
= BITMAP_ALLOC (NULL
);
1409 unsigned int lowest_node
;
1412 bitmap_set_bit (scc
, n
);
1414 while (si
->scc_stack
.length () != 0
1415 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1417 unsigned int w
= si
->scc_stack
.pop ();
1419 bitmap_set_bit (scc
, w
);
1422 lowest_node
= bitmap_first_set_bit (scc
);
1423 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1425 /* Collapse the SCC nodes into a single node, and mark the
1427 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1429 if (i
< FIRST_REF_NODE
)
1431 if (unite (lowest_node
, i
))
1432 unify_nodes (graph
, lowest_node
, i
, false);
1436 unite (lowest_node
, i
);
1437 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1441 bitmap_set_bit (si
->deleted
, n
);
1444 si
->scc_stack
.safe_push (n
);
1447 /* Unify node FROM into node TO, updating the changed count if
1448 necessary when UPDATE_CHANGED is true. */
1451 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1452 bool update_changed
)
1454 gcc_checking_assert (to
!= from
&& find (to
) == to
);
1456 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1457 fprintf (dump_file
, "Unifying %s to %s\n",
1458 get_varinfo (from
)->name
,
1459 get_varinfo (to
)->name
);
1462 stats
.unified_vars_dynamic
++;
1464 stats
.unified_vars_static
++;
1466 merge_graph_nodes (graph
, to
, from
);
1467 merge_node_constraints (graph
, to
, from
);
1469 /* Mark TO as changed if FROM was changed. If TO was already marked
1470 as changed, decrease the changed count. */
1473 && bitmap_clear_bit (changed
, from
))
1474 bitmap_set_bit (changed
, to
);
1475 varinfo_t fromvi
= get_varinfo (from
);
1476 if (fromvi
->solution
)
1478 /* If the solution changes because of the merging, we need to mark
1479 the variable as changed. */
1480 varinfo_t tovi
= get_varinfo (to
);
1481 if (bitmap_ior_into (tovi
->solution
, fromvi
->solution
))
1484 bitmap_set_bit (changed
, to
);
1487 BITMAP_FREE (fromvi
->solution
);
1488 if (fromvi
->oldsolution
)
1489 BITMAP_FREE (fromvi
->oldsolution
);
1491 if (stats
.iterations
> 0
1492 && tovi
->oldsolution
)
1493 BITMAP_FREE (tovi
->oldsolution
);
1495 if (graph
->succs
[to
])
1496 bitmap_clear_bit (graph
->succs
[to
], to
);
1499 /* Information needed to compute the topological ordering of a graph. */
1503 /* sbitmap of visited nodes. */
1505 /* Array that stores the topological order of the graph, *in
1507 vec
<unsigned> topo_order
;
1511 /* Initialize and return a topological info structure. */
1513 static struct topo_info
*
1514 init_topo_info (void)
1516 size_t size
= graph
->size
;
1517 struct topo_info
*ti
= XNEW (struct topo_info
);
1518 ti
->visited
= sbitmap_alloc (size
);
1519 bitmap_clear (ti
->visited
);
1520 ti
->topo_order
.create (1);
1525 /* Free the topological sort info pointed to by TI. */
1528 free_topo_info (struct topo_info
*ti
)
1530 sbitmap_free (ti
->visited
);
1531 ti
->topo_order
.release ();
1535 /* Visit the graph in topological order, and store the order in the
1536 topo_info structure. */
1539 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1545 bitmap_set_bit (ti
->visited
, n
);
1547 if (graph
->succs
[n
])
1548 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1550 if (!bitmap_bit_p (ti
->visited
, j
))
1551 topo_visit (graph
, ti
, j
);
1554 ti
->topo_order
.safe_push (n
);
1557 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1558 starting solution for y. */
1561 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1564 unsigned int lhs
= c
->lhs
.var
;
1566 bitmap sol
= get_varinfo (lhs
)->solution
;
1569 HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1571 /* Our IL does not allow this. */
1572 gcc_checking_assert (c
->lhs
.offset
== 0);
1574 /* If the solution of Y contains anything it is good enough to transfer
1576 if (bitmap_bit_p (delta
, anything_id
))
1578 flag
|= bitmap_set_bit (sol
, anything_id
);
1582 /* If we do not know at with offset the rhs is dereferenced compute
1583 the reachability set of DELTA, conservatively assuming it is
1584 dereferenced at all valid offsets. */
1585 if (roffset
== UNKNOWN_OFFSET
)
1587 solution_set_expand (delta
);
1588 /* No further offset processing is necessary. */
1592 /* For each variable j in delta (Sol(y)), add
1593 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1594 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1596 varinfo_t v
= get_varinfo (j
);
1597 HOST_WIDE_INT fieldoffset
= v
->offset
+ roffset
;
1601 fieldoffset
= v
->offset
;
1602 else if (roffset
!= 0)
1603 v
= first_vi_for_offset (v
, fieldoffset
);
1604 /* If the access is outside of the variable we can ignore it. */
1612 /* Adding edges from the special vars is pointless.
1613 They don't have sets that can change. */
1614 if (get_varinfo (t
)->is_special_var
)
1615 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1616 /* Merging the solution from ESCAPED needlessly increases
1617 the set. Use ESCAPED as representative instead. */
1618 else if (v
->id
== escaped_id
)
1619 flag
|= bitmap_set_bit (sol
, escaped_id
);
1620 else if (v
->may_have_pointers
1621 && add_graph_edge (graph
, lhs
, t
))
1622 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1624 /* If the variable is not exactly at the requested offset
1625 we have to include the next one. */
1626 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1631 fieldoffset
= v
->offset
;
1637 /* If the LHS solution changed, mark the var as changed. */
1640 get_varinfo (lhs
)->solution
= sol
;
1641 bitmap_set_bit (changed
, lhs
);
1645 /* Process a constraint C that represents *(x + off) = y using DELTA
1646 as the starting solution for x. */
1649 do_ds_constraint (constraint_t c
, bitmap delta
)
1651 unsigned int rhs
= c
->rhs
.var
;
1652 bitmap sol
= get_varinfo (rhs
)->solution
;
1655 HOST_WIDE_INT loff
= c
->lhs
.offset
;
1656 bool escaped_p
= false;
1658 /* Our IL does not allow this. */
1659 gcc_checking_assert (c
->rhs
.offset
== 0);
1661 /* If the solution of y contains ANYTHING simply use the ANYTHING
1662 solution. This avoids needlessly increasing the points-to sets. */
1663 if (bitmap_bit_p (sol
, anything_id
))
1664 sol
= get_varinfo (find (anything_id
))->solution
;
1666 /* If the solution for x contains ANYTHING we have to merge the
1667 solution of y into all pointer variables which we do via
1669 if (bitmap_bit_p (delta
, anything_id
))
1671 unsigned t
= find (storedanything_id
);
1672 if (add_graph_edge (graph
, t
, rhs
))
1674 if (bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1675 bitmap_set_bit (changed
, t
);
1680 /* If we do not know at with offset the rhs is dereferenced compute
1681 the reachability set of DELTA, conservatively assuming it is
1682 dereferenced at all valid offsets. */
1683 if (loff
== UNKNOWN_OFFSET
)
1685 solution_set_expand (delta
);
1689 /* For each member j of delta (Sol(x)), add an edge from y to j and
1690 union Sol(y) into Sol(j) */
1691 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1693 varinfo_t v
= get_varinfo (j
);
1695 HOST_WIDE_INT fieldoffset
= v
->offset
+ loff
;
1698 fieldoffset
= v
->offset
;
1700 v
= first_vi_for_offset (v
, fieldoffset
);
1701 /* If the access is outside of the variable we can ignore it. */
1707 if (v
->may_have_pointers
)
1709 /* If v is a global variable then this is an escape point. */
1710 if (v
->is_global_var
1713 t
= find (escaped_id
);
1714 if (add_graph_edge (graph
, t
, rhs
)
1715 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1716 bitmap_set_bit (changed
, t
);
1717 /* Enough to let rhs escape once. */
1721 if (v
->is_special_var
)
1725 if (add_graph_edge (graph
, t
, rhs
)
1726 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1727 bitmap_set_bit (changed
, t
);
1730 /* If the variable is not exactly at the requested offset
1731 we have to include the next one. */
1732 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1737 fieldoffset
= v
->offset
;
1743 /* Handle a non-simple (simple meaning requires no iteration),
1744 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1747 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
)
1749 if (c
->lhs
.type
== DEREF
)
1751 if (c
->rhs
.type
== ADDRESSOF
)
1758 do_ds_constraint (c
, delta
);
1761 else if (c
->rhs
.type
== DEREF
)
1764 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1765 do_sd_constraint (graph
, c
, delta
);
1773 gcc_checking_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
);
1774 solution
= get_varinfo (c
->rhs
.var
)->solution
;
1775 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1777 flag
= set_union_with_increment (tmp
, solution
, c
->rhs
.offset
);
1780 bitmap_set_bit (changed
, c
->lhs
.var
);
1784 /* Initialize and return a new SCC info structure. */
1786 static struct scc_info
*
1787 init_scc_info (size_t size
)
1789 struct scc_info
*si
= XNEW (struct scc_info
);
1792 si
->current_index
= 0;
1793 si
->visited
= sbitmap_alloc (size
);
1794 bitmap_clear (si
->visited
);
1795 si
->deleted
= sbitmap_alloc (size
);
1796 bitmap_clear (si
->deleted
);
1797 si
->node_mapping
= XNEWVEC (unsigned int, size
);
1798 si
->dfs
= XCNEWVEC (unsigned int, size
);
1800 for (i
= 0; i
< size
; i
++)
1801 si
->node_mapping
[i
] = i
;
1803 si
->scc_stack
.create (1);
1807 /* Free an SCC info structure pointed to by SI */
1810 free_scc_info (struct scc_info
*si
)
1812 sbitmap_free (si
->visited
);
1813 sbitmap_free (si
->deleted
);
1814 free (si
->node_mapping
);
1816 si
->scc_stack
.release ();
1821 /* Find indirect cycles in GRAPH that occur, using strongly connected
1822 components, and note them in the indirect cycles map.
1824 This technique comes from Ben Hardekopf and Calvin Lin,
1825 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1826 Lines of Code", submitted to PLDI 2007. */
1829 find_indirect_cycles (constraint_graph_t graph
)
1832 unsigned int size
= graph
->size
;
1833 struct scc_info
*si
= init_scc_info (size
);
1835 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1836 if (!bitmap_bit_p (si
->visited
, i
) && find (i
) == i
)
1837 scc_visit (graph
, si
, i
);
1842 /* Compute a topological ordering for GRAPH, and store the result in the
1843 topo_info structure TI. */
1846 compute_topo_order (constraint_graph_t graph
,
1847 struct topo_info
*ti
)
1850 unsigned int size
= graph
->size
;
1852 for (i
= 0; i
!= size
; ++i
)
1853 if (!bitmap_bit_p (ti
->visited
, i
) && find (i
) == i
)
1854 topo_visit (graph
, ti
, i
);
1857 /* Structure used to for hash value numbering of pointer equivalence
1860 typedef struct equiv_class_label
1863 unsigned int equivalence_class
;
1865 } *equiv_class_label_t
;
1866 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
1868 /* Equiv_class_label hashtable helpers. */
1870 struct equiv_class_hasher
: typed_free_remove
<equiv_class_label
>
1872 typedef equiv_class_label value_type
;
1873 typedef equiv_class_label compare_type
;
1874 static inline hashval_t
hash (const value_type
*);
1875 static inline bool equal (const value_type
*, const compare_type
*);
1878 /* Hash function for a equiv_class_label_t */
1881 equiv_class_hasher::hash (const value_type
*ecl
)
1883 return ecl
->hashcode
;
1886 /* Equality function for two equiv_class_label_t's. */
1889 equiv_class_hasher::equal (const value_type
*eql1
, const compare_type
*eql2
)
1891 return (eql1
->hashcode
== eql2
->hashcode
1892 && bitmap_equal_p (eql1
->labels
, eql2
->labels
));
1895 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1897 static hash_table
<equiv_class_hasher
> pointer_equiv_class_table
;
1899 /* A hashtable for mapping a bitmap of labels->location equivalence
1901 static hash_table
<equiv_class_hasher
> location_equiv_class_table
;
1903 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1904 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1905 is equivalent to. */
1907 static equiv_class_label
*
1908 equiv_class_lookup_or_add (hash_table
<equiv_class_hasher
> table
, bitmap labels
)
1910 equiv_class_label
**slot
;
1911 equiv_class_label ecl
;
1913 ecl
.labels
= labels
;
1914 ecl
.hashcode
= bitmap_hash (labels
);
1915 slot
= table
.find_slot_with_hash (&ecl
, ecl
.hashcode
, INSERT
);
1918 *slot
= XNEW (struct equiv_class_label
);
1919 (*slot
)->labels
= labels
;
1920 (*slot
)->hashcode
= ecl
.hashcode
;
1921 (*slot
)->equivalence_class
= 0;
1927 /* Perform offline variable substitution.
1929 This is a worst case quadratic time way of identifying variables
1930 that must have equivalent points-to sets, including those caused by
1931 static cycles, and single entry subgraphs, in the constraint graph.
1933 The technique is described in "Exploiting Pointer and Location
1934 Equivalence to Optimize Pointer Analysis. In the 14th International
1935 Static Analysis Symposium (SAS), August 2007." It is known as the
1936 "HU" algorithm, and is equivalent to value numbering the collapsed
1937 constraint graph including evaluating unions.
1939 The general method of finding equivalence classes is as follows:
1940 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1941 Initialize all non-REF nodes to be direct nodes.
1942 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1944 For each constraint containing the dereference, we also do the same
1947 We then compute SCC's in the graph and unify nodes in the same SCC,
1950 For each non-collapsed node x:
1951 Visit all unvisited explicit incoming edges.
1952 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1954 Lookup the equivalence class for pts(x).
1955 If we found one, equivalence_class(x) = found class.
1956 Otherwise, equivalence_class(x) = new class, and new_class is
1957 added to the lookup table.
1959 All direct nodes with the same equivalence class can be replaced
1960 with a single representative node.
1961 All unlabeled nodes (label == 0) are not pointers and all edges
1962 involving them can be eliminated.
1963 We perform these optimizations during rewrite_constraints
1965 In addition to pointer equivalence class finding, we also perform
1966 location equivalence class finding. This is the set of variables
1967 that always appear together in points-to sets. We use this to
1968 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
1976 /* Recursive routine to find strongly connected components in GRAPH,
1977 and label it's nodes with DFS numbers. */
1980 condense_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1984 unsigned int my_dfs
;
1986 gcc_checking_assert (si
->node_mapping
[n
] == n
);
1987 bitmap_set_bit (si
->visited
, n
);
1988 si
->dfs
[n
] = si
->current_index
++;
1989 my_dfs
= si
->dfs
[n
];
1991 /* Visit all the successors. */
1992 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
1994 unsigned int w
= si
->node_mapping
[i
];
1996 if (bitmap_bit_p (si
->deleted
, w
))
1999 if (!bitmap_bit_p (si
->visited
, w
))
2000 condense_visit (graph
, si
, w
);
2002 unsigned int t
= si
->node_mapping
[w
];
2003 gcc_checking_assert (si
->node_mapping
[n
] == n
);
2004 if (si
->dfs
[t
] < si
->dfs
[n
])
2005 si
->dfs
[n
] = si
->dfs
[t
];
2008 /* Visit all the implicit predecessors. */
2009 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
2011 unsigned int w
= si
->node_mapping
[i
];
2013 if (bitmap_bit_p (si
->deleted
, w
))
2016 if (!bitmap_bit_p (si
->visited
, w
))
2017 condense_visit (graph
, si
, w
);
2019 unsigned int t
= si
->node_mapping
[w
];
2020 gcc_assert (si
->node_mapping
[n
] == n
);
2021 if (si
->dfs
[t
] < si
->dfs
[n
])
2022 si
->dfs
[n
] = si
->dfs
[t
];
2025 /* See if any components have been identified. */
2026 if (si
->dfs
[n
] == my_dfs
)
2028 while (si
->scc_stack
.length () != 0
2029 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
2031 unsigned int w
= si
->scc_stack
.pop ();
2032 si
->node_mapping
[w
] = n
;
2034 if (!bitmap_bit_p (graph
->direct_nodes
, w
))
2035 bitmap_clear_bit (graph
->direct_nodes
, n
);
2037 /* Unify our nodes. */
2038 if (graph
->preds
[w
])
2040 if (!graph
->preds
[n
])
2041 graph
->preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2042 bitmap_ior_into (graph
->preds
[n
], graph
->preds
[w
]);
2044 if (graph
->implicit_preds
[w
])
2046 if (!graph
->implicit_preds
[n
])
2047 graph
->implicit_preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2048 bitmap_ior_into (graph
->implicit_preds
[n
],
2049 graph
->implicit_preds
[w
]);
2051 if (graph
->points_to
[w
])
2053 if (!graph
->points_to
[n
])
2054 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2055 bitmap_ior_into (graph
->points_to
[n
],
2056 graph
->points_to
[w
]);
2059 bitmap_set_bit (si
->deleted
, n
);
2062 si
->scc_stack
.safe_push (n
);
2065 /* Label pointer equivalences. */
2068 label_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
2070 unsigned int i
, first_pred
;
2073 bitmap_set_bit (si
->visited
, n
);
2075 /* Label and union our incoming edges's points to sets. */
2077 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2079 unsigned int w
= si
->node_mapping
[i
];
2080 if (!bitmap_bit_p (si
->visited
, w
))
2081 label_visit (graph
, si
, w
);
2083 /* Skip unused edges */
2084 if (w
== n
|| graph
->pointer_label
[w
] == 0)
2087 if (graph
->points_to
[w
])
2089 if (!graph
->points_to
[n
])
2091 if (first_pred
== -1U)
2095 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2096 bitmap_ior (graph
->points_to
[n
],
2097 graph
->points_to
[first_pred
],
2098 graph
->points_to
[w
]);
2102 bitmap_ior_into (graph
->points_to
[n
], graph
->points_to
[w
]);
2106 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2107 if (!bitmap_bit_p (graph
->direct_nodes
, n
))
2109 if (!graph
->points_to
[n
])
2111 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2112 if (first_pred
!= -1U)
2113 bitmap_copy (graph
->points_to
[n
], graph
->points_to
[first_pred
]);
2115 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
2116 graph
->pointer_label
[n
] = pointer_equiv_class
++;
2117 equiv_class_label_t ecl
;
2118 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2119 graph
->points_to
[n
]);
2120 ecl
->equivalence_class
= graph
->pointer_label
[n
];
2124 /* If there was only a single non-empty predecessor the pointer equiv
2125 class is the same. */
2126 if (!graph
->points_to
[n
])
2128 if (first_pred
!= -1U)
2130 graph
->pointer_label
[n
] = graph
->pointer_label
[first_pred
];
2131 graph
->points_to
[n
] = graph
->points_to
[first_pred
];
2136 if (!bitmap_empty_p (graph
->points_to
[n
]))
2138 equiv_class_label_t ecl
;
2139 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2140 graph
->points_to
[n
]);
2141 if (ecl
->equivalence_class
== 0)
2142 ecl
->equivalence_class
= pointer_equiv_class
++;
2145 BITMAP_FREE (graph
->points_to
[n
]);
2146 graph
->points_to
[n
] = ecl
->labels
;
2148 graph
->pointer_label
[n
] = ecl
->equivalence_class
;
2152 /* Print the pred graph in dot format. */
2155 dump_pred_graph (struct scc_info
*si
, FILE *file
)
2159 /* Only print the graph if it has already been initialized: */
2163 /* Prints the header of the dot file: */
2164 fprintf (file
, "strict digraph {\n");
2165 fprintf (file
, " node [\n shape = box\n ]\n");
2166 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
2167 fprintf (file
, "\n // List of nodes and complex constraints in "
2168 "the constraint graph:\n");
2170 /* The next lines print the nodes in the graph together with the
2171 complex constraints attached to them. */
2172 for (i
= 1; i
< graph
->size
; i
++)
2174 if (i
== FIRST_REF_NODE
)
2176 if (si
->node_mapping
[i
] != i
)
2178 if (i
< FIRST_REF_NODE
)
2179 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2181 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2182 if (graph
->points_to
[i
]
2183 && !bitmap_empty_p (graph
->points_to
[i
]))
2185 fprintf (file
, "[label=\"%s = {", get_varinfo (i
)->name
);
2188 EXECUTE_IF_SET_IN_BITMAP (graph
->points_to
[i
], 0, j
, bi
)
2189 fprintf (file
, " %d", j
);
2190 fprintf (file
, " }\"]");
2192 fprintf (file
, ";\n");
2195 /* Go over the edges. */
2196 fprintf (file
, "\n // Edges in the constraint graph:\n");
2197 for (i
= 1; i
< graph
->size
; i
++)
2201 if (si
->node_mapping
[i
] != i
)
2203 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[i
], 0, j
, bi
)
2205 unsigned from
= si
->node_mapping
[j
];
2206 if (from
< FIRST_REF_NODE
)
2207 fprintf (file
, "\"%s\"", get_varinfo (from
)->name
);
2209 fprintf (file
, "\"*%s\"", get_varinfo (from
- FIRST_REF_NODE
)->name
);
2210 fprintf (file
, " -> ");
2211 if (i
< FIRST_REF_NODE
)
2212 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2214 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2215 fprintf (file
, ";\n");
2219 /* Prints the tail of the dot file. */
2220 fprintf (file
, "}\n");
2223 /* Perform offline variable substitution, discovering equivalence
2224 classes, and eliminating non-pointer variables. */
2226 static struct scc_info
*
2227 perform_var_substitution (constraint_graph_t graph
)
2230 unsigned int size
= graph
->size
;
2231 struct scc_info
*si
= init_scc_info (size
);
2233 bitmap_obstack_initialize (&iteration_obstack
);
2234 pointer_equiv_class_table
.create (511);
2235 location_equiv_class_table
.create (511);
2236 pointer_equiv_class
= 1;
2237 location_equiv_class
= 1;
2239 /* Condense the nodes, which means to find SCC's, count incoming
2240 predecessors, and unite nodes in SCC's. */
2241 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2242 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2243 condense_visit (graph
, si
, si
->node_mapping
[i
]);
2245 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
2247 fprintf (dump_file
, "\n\n// The constraint graph before var-substitution "
2248 "in dot format:\n");
2249 dump_pred_graph (si
, dump_file
);
2250 fprintf (dump_file
, "\n\n");
2253 bitmap_clear (si
->visited
);
2254 /* Actually the label the nodes for pointer equivalences */
2255 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2256 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2257 label_visit (graph
, si
, si
->node_mapping
[i
]);
2259 /* Calculate location equivalence labels. */
2260 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2266 if (!graph
->pointed_by
[i
])
2268 pointed_by
= BITMAP_ALLOC (&iteration_obstack
);
2270 /* Translate the pointed-by mapping for pointer equivalence
2272 EXECUTE_IF_SET_IN_BITMAP (graph
->pointed_by
[i
], 0, j
, bi
)
2274 bitmap_set_bit (pointed_by
,
2275 graph
->pointer_label
[si
->node_mapping
[j
]]);
2277 /* The original pointed_by is now dead. */
2278 BITMAP_FREE (graph
->pointed_by
[i
]);
2280 /* Look up the location equivalence label if one exists, or make
2282 equiv_class_label_t ecl
;
2283 ecl
= equiv_class_lookup_or_add (location_equiv_class_table
, pointed_by
);
2284 if (ecl
->equivalence_class
== 0)
2285 ecl
->equivalence_class
= location_equiv_class
++;
2288 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2289 fprintf (dump_file
, "Found location equivalence for node %s\n",
2290 get_varinfo (i
)->name
);
2291 BITMAP_FREE (pointed_by
);
2293 graph
->loc_label
[i
] = ecl
->equivalence_class
;
2297 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2298 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2300 unsigned j
= si
->node_mapping
[i
];
2303 fprintf (dump_file
, "%s node id %d ",
2304 bitmap_bit_p (graph
->direct_nodes
, i
)
2305 ? "Direct" : "Indirect", i
);
2306 if (i
< FIRST_REF_NODE
)
2307 fprintf (dump_file
, "\"%s\"", get_varinfo (i
)->name
);
2309 fprintf (dump_file
, "\"*%s\"",
2310 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2311 fprintf (dump_file
, " mapped to SCC leader node id %d ", j
);
2312 if (j
< FIRST_REF_NODE
)
2313 fprintf (dump_file
, "\"%s\"\n", get_varinfo (j
)->name
);
2315 fprintf (dump_file
, "\"*%s\"\n",
2316 get_varinfo (j
- FIRST_REF_NODE
)->name
);
2321 "Equivalence classes for %s node id %d ",
2322 bitmap_bit_p (graph
->direct_nodes
, i
)
2323 ? "direct" : "indirect", i
);
2324 if (i
< FIRST_REF_NODE
)
2325 fprintf (dump_file
, "\"%s\"", get_varinfo (i
)->name
);
2327 fprintf (dump_file
, "\"*%s\"",
2328 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2330 ": pointer %d, location %d\n",
2331 graph
->pointer_label
[i
], graph
->loc_label
[i
]);
2335 /* Quickly eliminate our non-pointer variables. */
2337 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2339 unsigned int node
= si
->node_mapping
[i
];
2341 if (graph
->pointer_label
[node
] == 0)
2343 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2345 "%s is a non-pointer variable, eliminating edges.\n",
2346 get_varinfo (node
)->name
);
2347 stats
.nonpointer_vars
++;
2348 clear_edges_for_node (graph
, node
);
2355 /* Free information that was only necessary for variable
2359 free_var_substitution_info (struct scc_info
*si
)
2362 free (graph
->pointer_label
);
2363 free (graph
->loc_label
);
2364 free (graph
->pointed_by
);
2365 free (graph
->points_to
);
2366 free (graph
->eq_rep
);
2367 sbitmap_free (graph
->direct_nodes
);
2368 pointer_equiv_class_table
.dispose ();
2369 location_equiv_class_table
.dispose ();
2370 bitmap_obstack_release (&iteration_obstack
);
2373 /* Return an existing node that is equivalent to NODE, which has
2374 equivalence class LABEL, if one exists. Return NODE otherwise. */
2377 find_equivalent_node (constraint_graph_t graph
,
2378 unsigned int node
, unsigned int label
)
2380 /* If the address version of this variable is unused, we can
2381 substitute it for anything else with the same label.
2382 Otherwise, we know the pointers are equivalent, but not the
2383 locations, and we can unite them later. */
2385 if (!bitmap_bit_p (graph
->address_taken
, node
))
2387 gcc_checking_assert (label
< graph
->size
);
2389 if (graph
->eq_rep
[label
] != -1)
2391 /* Unify the two variables since we know they are equivalent. */
2392 if (unite (graph
->eq_rep
[label
], node
))
2393 unify_nodes (graph
, graph
->eq_rep
[label
], node
, false);
2394 return graph
->eq_rep
[label
];
2398 graph
->eq_rep
[label
] = node
;
2399 graph
->pe_rep
[label
] = node
;
2404 gcc_checking_assert (label
< graph
->size
);
2405 graph
->pe
[node
] = label
;
2406 if (graph
->pe_rep
[label
] == -1)
2407 graph
->pe_rep
[label
] = node
;
2413 /* Unite pointer equivalent but not location equivalent nodes in
2414 GRAPH. This may only be performed once variable substitution is
2418 unite_pointer_equivalences (constraint_graph_t graph
)
2422 /* Go through the pointer equivalences and unite them to their
2423 representative, if they aren't already. */
2424 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2426 unsigned int label
= graph
->pe
[i
];
2429 int label_rep
= graph
->pe_rep
[label
];
2431 if (label_rep
== -1)
2434 label_rep
= find (label_rep
);
2435 if (label_rep
>= 0 && unite (label_rep
, find (i
)))
2436 unify_nodes (graph
, label_rep
, i
, false);
2441 /* Move complex constraints to the GRAPH nodes they belong to. */
2444 move_complex_constraints (constraint_graph_t graph
)
2449 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2453 struct constraint_expr lhs
= c
->lhs
;
2454 struct constraint_expr rhs
= c
->rhs
;
2456 if (lhs
.type
== DEREF
)
2458 insert_into_complex (graph
, lhs
.var
, c
);
2460 else if (rhs
.type
== DEREF
)
2462 if (!(get_varinfo (lhs
.var
)->is_special_var
))
2463 insert_into_complex (graph
, rhs
.var
, c
);
2465 else if (rhs
.type
!= ADDRESSOF
&& lhs
.var
> anything_id
2466 && (lhs
.offset
!= 0 || rhs
.offset
!= 0))
2468 insert_into_complex (graph
, rhs
.var
, c
);
2475 /* Optimize and rewrite complex constraints while performing
2476 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2477 result of perform_variable_substitution. */
2480 rewrite_constraints (constraint_graph_t graph
,
2481 struct scc_info
*si
)
2486 #ifdef ENABLE_CHECKING
2487 for (unsigned int j
= 0; j
< graph
->size
; j
++)
2488 gcc_assert (find (j
) == j
);
2491 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2493 struct constraint_expr lhs
= c
->lhs
;
2494 struct constraint_expr rhs
= c
->rhs
;
2495 unsigned int lhsvar
= find (lhs
.var
);
2496 unsigned int rhsvar
= find (rhs
.var
);
2497 unsigned int lhsnode
, rhsnode
;
2498 unsigned int lhslabel
, rhslabel
;
2500 lhsnode
= si
->node_mapping
[lhsvar
];
2501 rhsnode
= si
->node_mapping
[rhsvar
];
2502 lhslabel
= graph
->pointer_label
[lhsnode
];
2503 rhslabel
= graph
->pointer_label
[rhsnode
];
2505 /* See if it is really a non-pointer variable, and if so, ignore
2509 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2512 fprintf (dump_file
, "%s is a non-pointer variable,"
2513 "ignoring constraint:",
2514 get_varinfo (lhs
.var
)->name
);
2515 dump_constraint (dump_file
, c
);
2516 fprintf (dump_file
, "\n");
2518 constraints
[i
] = NULL
;
2524 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2527 fprintf (dump_file
, "%s is a non-pointer variable,"
2528 "ignoring constraint:",
2529 get_varinfo (rhs
.var
)->name
);
2530 dump_constraint (dump_file
, c
);
2531 fprintf (dump_file
, "\n");
2533 constraints
[i
] = NULL
;
2537 lhsvar
= find_equivalent_node (graph
, lhsvar
, lhslabel
);
2538 rhsvar
= find_equivalent_node (graph
, rhsvar
, rhslabel
);
2539 c
->lhs
.var
= lhsvar
;
2540 c
->rhs
.var
= rhsvar
;
2544 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2545 part of an SCC, false otherwise. */
2548 eliminate_indirect_cycles (unsigned int node
)
2550 if (graph
->indirect_cycles
[node
] != -1
2551 && !bitmap_empty_p (get_varinfo (node
)->solution
))
2554 vec
<unsigned> queue
= vNULL
;
2556 unsigned int to
= find (graph
->indirect_cycles
[node
]);
2559 /* We can't touch the solution set and call unify_nodes
2560 at the same time, because unify_nodes is going to do
2561 bitmap unions into it. */
2563 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node
)->solution
, 0, i
, bi
)
2565 if (find (i
) == i
&& i
!= to
)
2568 queue
.safe_push (i
);
2573 queue
.iterate (queuepos
, &i
);
2576 unify_nodes (graph
, to
, i
, true);
2584 /* Solve the constraint graph GRAPH using our worklist solver.
2585 This is based on the PW* family of solvers from the "Efficient Field
2586 Sensitive Pointer Analysis for C" paper.
2587 It works by iterating over all the graph nodes, processing the complex
2588 constraints and propagating the copy constraints, until everything stops
2589 changed. This corresponds to steps 6-8 in the solving list given above. */
2592 solve_graph (constraint_graph_t graph
)
2594 unsigned int size
= graph
->size
;
2598 changed
= BITMAP_ALLOC (NULL
);
2600 /* Mark all initial non-collapsed nodes as changed. */
2601 for (i
= 1; i
< size
; i
++)
2603 varinfo_t ivi
= get_varinfo (i
);
2604 if (find (i
) == i
&& !bitmap_empty_p (ivi
->solution
)
2605 && ((graph
->succs
[i
] && !bitmap_empty_p (graph
->succs
[i
]))
2606 || graph
->complex[i
].length () > 0))
2607 bitmap_set_bit (changed
, i
);
2610 /* Allocate a bitmap to be used to store the changed bits. */
2611 pts
= BITMAP_ALLOC (&pta_obstack
);
2613 while (!bitmap_empty_p (changed
))
2616 struct topo_info
*ti
= init_topo_info ();
2619 bitmap_obstack_initialize (&iteration_obstack
);
2621 compute_topo_order (graph
, ti
);
2623 while (ti
->topo_order
.length () != 0)
2626 i
= ti
->topo_order
.pop ();
2628 /* If this variable is not a representative, skip it. */
2632 /* In certain indirect cycle cases, we may merge this
2633 variable to another. */
2634 if (eliminate_indirect_cycles (i
) && find (i
) != i
)
2637 /* If the node has changed, we need to process the
2638 complex constraints and outgoing edges again. */
2639 if (bitmap_clear_bit (changed
, i
))
2644 vec
<constraint_t
> complex = graph
->complex[i
];
2645 varinfo_t vi
= get_varinfo (i
);
2646 bool solution_empty
;
2648 /* Compute the changed set of solution bits. If anything
2649 is in the solution just propagate that. */
2650 if (bitmap_bit_p (vi
->solution
, anything_id
))
2652 /* If anything is also in the old solution there is
2654 ??? But we shouldn't ended up with "changed" set ... */
2656 && bitmap_bit_p (vi
->oldsolution
, anything_id
))
2658 bitmap_copy (pts
, get_varinfo (find (anything_id
))->solution
);
2660 else if (vi
->oldsolution
)
2661 bitmap_and_compl (pts
, vi
->solution
, vi
->oldsolution
);
2663 bitmap_copy (pts
, vi
->solution
);
2665 if (bitmap_empty_p (pts
))
2668 if (vi
->oldsolution
)
2669 bitmap_ior_into (vi
->oldsolution
, pts
);
2672 vi
->oldsolution
= BITMAP_ALLOC (&oldpta_obstack
);
2673 bitmap_copy (vi
->oldsolution
, pts
);
2676 solution
= vi
->solution
;
2677 solution_empty
= bitmap_empty_p (solution
);
2679 /* Process the complex constraints */
2680 FOR_EACH_VEC_ELT (complex, j
, c
)
2682 /* XXX: This is going to unsort the constraints in
2683 some cases, which will occasionally add duplicate
2684 constraints during unification. This does not
2685 affect correctness. */
2686 c
->lhs
.var
= find (c
->lhs
.var
);
2687 c
->rhs
.var
= find (c
->rhs
.var
);
2689 /* The only complex constraint that can change our
2690 solution to non-empty, given an empty solution,
2691 is a constraint where the lhs side is receiving
2692 some set from elsewhere. */
2693 if (!solution_empty
|| c
->lhs
.type
!= DEREF
)
2694 do_complex_constraint (graph
, c
, pts
);
2697 solution_empty
= bitmap_empty_p (solution
);
2699 if (!solution_empty
)
2702 unsigned eff_escaped_id
= find (escaped_id
);
2704 /* Propagate solution to all successors. */
2705 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
],
2711 unsigned int to
= find (j
);
2712 tmp
= get_varinfo (to
)->solution
;
2715 /* Don't try to propagate to ourselves. */
2719 /* If we propagate from ESCAPED use ESCAPED as
2721 if (i
== eff_escaped_id
)
2722 flag
= bitmap_set_bit (tmp
, escaped_id
);
2724 flag
= bitmap_ior_into (tmp
, pts
);
2727 bitmap_set_bit (changed
, to
);
2732 free_topo_info (ti
);
2733 bitmap_obstack_release (&iteration_obstack
);
2737 BITMAP_FREE (changed
);
2738 bitmap_obstack_release (&oldpta_obstack
);
/* Map from trees to variable infos.  */
static struct pointer_map_t *vi_for_tree;
2745 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2748 insert_vi_for_tree (tree t
, varinfo_t vi
)
2750 void **slot
= pointer_map_insert (vi_for_tree
, t
);
2752 gcc_assert (*slot
== NULL
);
2756 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2757 exist in the map, return NULL, otherwise, return the varinfo we found. */
2760 lookup_vi_for_tree (tree t
)
2762 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2766 return (varinfo_t
) *slot
;
2769 /* Return a printable name for DECL */
2772 alias_get_name (tree decl
)
2774 const char *res
= NULL
;
2776 int num_printed
= 0;
2781 if (TREE_CODE (decl
) == SSA_NAME
)
2783 res
= get_name (decl
);
2785 num_printed
= asprintf (&temp
, "%s_%u", res
, SSA_NAME_VERSION (decl
));
2787 num_printed
= asprintf (&temp
, "_%u", SSA_NAME_VERSION (decl
));
2788 if (num_printed
> 0)
2790 res
= ggc_strdup (temp
);
2794 else if (DECL_P (decl
))
2796 if (DECL_ASSEMBLER_NAME_SET_P (decl
))
2797 res
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
2800 res
= get_name (decl
);
2803 num_printed
= asprintf (&temp
, "D.%u", DECL_UID (decl
));
2804 if (num_printed
> 0)
2806 res
= ggc_strdup (temp
);
2818 /* Find the variable id for tree T in the map.
2819 If T doesn't exist in the map, create an entry for it and return it. */
2822 get_vi_for_tree (tree t
)
2824 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2826 return get_varinfo (create_variable_info_for (t
, alias_get_name (t
)));
2828 return (varinfo_t
) *slot
;
2831 /* Get a scalar constraint expression for a new temporary variable. */
2833 static struct constraint_expr
2834 new_scalar_tmp_constraint_exp (const char *name
)
2836 struct constraint_expr tmp
;
2839 vi
= new_var_info (NULL_TREE
, name
);
2843 vi
->is_full_var
= 1;
2852 /* Get a constraint expression vector from an SSA_VAR_P node.
2853 If address_p is true, the result will be taken its address of. */
2856 get_constraint_for_ssa_var (tree t
, vec
<ce_s
> *results
, bool address_p
)
2858 struct constraint_expr cexpr
;
2861 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2862 gcc_assert (TREE_CODE (t
) == SSA_NAME
|| DECL_P (t
));
2864 /* For parameters, get at the points-to set for the actual parm
2866 if (TREE_CODE (t
) == SSA_NAME
2867 && SSA_NAME_IS_DEFAULT_DEF (t
)
2868 && (TREE_CODE (SSA_NAME_VAR (t
)) == PARM_DECL
2869 || TREE_CODE (SSA_NAME_VAR (t
)) == RESULT_DECL
))
2871 get_constraint_for_ssa_var (SSA_NAME_VAR (t
), results
, address_p
);
2875 /* For global variables resort to the alias target. */
2876 if (TREE_CODE (t
) == VAR_DECL
2877 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
2879 struct varpool_node
*node
= varpool_get_node (t
);
2880 if (node
&& node
->alias
&& node
->analyzed
)
2882 node
= varpool_variable_node (node
, NULL
);
2887 vi
= get_vi_for_tree (t
);
2889 cexpr
.type
= SCALAR
;
2891 /* If we determine the result is "anything", and we know this is readonly,
2892 say it points to readonly memory instead. */
2893 if (cexpr
.var
== anything_id
&& TREE_READONLY (t
))
2896 cexpr
.type
= ADDRESSOF
;
2897 cexpr
.var
= readonly_id
;
2900 /* If we are not taking the address of the constraint expr, add all
2901 sub-fiels of the variable as well. */
2903 && !vi
->is_full_var
)
2905 for (; vi
; vi
= vi_next (vi
))
2908 results
->safe_push (cexpr
);
2913 results
->safe_push (cexpr
);
2916 /* Process constraint T, performing various simplifications and then
2917 adding it to our list of overall constraints. */
2920 process_constraint (constraint_t t
)
2922 struct constraint_expr rhs
= t
->rhs
;
2923 struct constraint_expr lhs
= t
->lhs
;
2925 gcc_assert (rhs
.var
< varmap
.length ());
2926 gcc_assert (lhs
.var
< varmap
.length ());
2928 /* If we didn't get any useful constraint from the lhs we get
2929 &ANYTHING as fallback from get_constraint_for. Deal with
2930 it here by turning it into *ANYTHING. */
2931 if (lhs
.type
== ADDRESSOF
2932 && lhs
.var
== anything_id
)
2935 /* ADDRESSOF on the lhs is invalid. */
2936 gcc_assert (lhs
.type
!= ADDRESSOF
);
2938 /* We shouldn't add constraints from things that cannot have pointers.
2939 It's not completely trivial to avoid in the callers, so do it here. */
2940 if (rhs
.type
!= ADDRESSOF
2941 && !get_varinfo (rhs
.var
)->may_have_pointers
)
2944 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2945 if (!get_varinfo (lhs
.var
)->may_have_pointers
)
2948 /* This can happen in our IR with things like n->a = *p */
2949 if (rhs
.type
== DEREF
&& lhs
.type
== DEREF
&& rhs
.var
!= anything_id
)
2951 /* Split into tmp = *rhs, *lhs = tmp */
2952 struct constraint_expr tmplhs
;
2953 tmplhs
= new_scalar_tmp_constraint_exp ("doubledereftmp");
2954 process_constraint (new_constraint (tmplhs
, rhs
));
2955 process_constraint (new_constraint (lhs
, tmplhs
));
2957 else if (rhs
.type
== ADDRESSOF
&& lhs
.type
== DEREF
)
2959 /* Split into tmp = &rhs, *lhs = tmp */
2960 struct constraint_expr tmplhs
;
2961 tmplhs
= new_scalar_tmp_constraint_exp ("derefaddrtmp");
2962 process_constraint (new_constraint (tmplhs
, rhs
));
2963 process_constraint (new_constraint (lhs
, tmplhs
));
2967 gcc_assert (rhs
.type
!= ADDRESSOF
|| rhs
.offset
== 0);
2968 constraints
.safe_push (t
);
2973 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2976 static HOST_WIDE_INT
2977 bitpos_of_field (const tree fdecl
)
2979 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl
))
2980 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl
)))
2983 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl
)) * BITS_PER_UNIT
2984 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl
)));
2988 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2989 resulting constraint expressions in *RESULTS. */
2992 get_constraint_for_ptr_offset (tree ptr
, tree offset
,
2995 struct constraint_expr c
;
2997 HOST_WIDE_INT rhsoffset
;
2999 /* If we do not do field-sensitive PTA adding offsets to pointers
3000 does not change the points-to solution. */
3001 if (!use_field_sensitive
)
3003 get_constraint_for_rhs (ptr
, results
);
3007 /* If the offset is not a non-negative integer constant that fits
3008 in a HOST_WIDE_INT, we have to fall back to a conservative
3009 solution which includes all sub-fields of all pointed-to
3010 variables of ptr. */
3011 if (offset
== NULL_TREE
3012 || TREE_CODE (offset
) != INTEGER_CST
)
3013 rhsoffset
= UNKNOWN_OFFSET
;
3016 /* Sign-extend the offset. */
3017 offset_int soffset
= offset_int::from (offset
, SIGNED
);
3018 if (!wi::fits_shwi_p (soffset
))
3019 rhsoffset
= UNKNOWN_OFFSET
;
3022 /* Make sure the bit-offset also fits. */
3023 HOST_WIDE_INT rhsunitoffset
= soffset
.to_shwi ();
3024 rhsoffset
= rhsunitoffset
* BITS_PER_UNIT
;
3025 if (rhsunitoffset
!= rhsoffset
/ BITS_PER_UNIT
)
3026 rhsoffset
= UNKNOWN_OFFSET
;
3030 get_constraint_for_rhs (ptr
, results
);
3034 /* As we are eventually appending to the solution do not use
3035 vec::iterate here. */
3036 n
= results
->length ();
3037 for (j
= 0; j
< n
; j
++)
3041 curr
= get_varinfo (c
.var
);
3043 if (c
.type
== ADDRESSOF
3044 /* If this varinfo represents a full variable just use it. */
3045 && curr
->is_full_var
)
3047 else if (c
.type
== ADDRESSOF
3048 /* If we do not know the offset add all subfields. */
3049 && rhsoffset
== UNKNOWN_OFFSET
)
3051 varinfo_t temp
= get_varinfo (curr
->head
);
3054 struct constraint_expr c2
;
3056 c2
.type
= ADDRESSOF
;
3058 if (c2
.var
!= c
.var
)
3059 results
->safe_push (c2
);
3060 temp
= vi_next (temp
);
3064 else if (c
.type
== ADDRESSOF
)
3067 unsigned HOST_WIDE_INT offset
= curr
->offset
+ rhsoffset
;
3069 /* Search the sub-field which overlaps with the
3070 pointed-to offset. If the result is outside of the variable
3071 we have to provide a conservative result, as the variable is
3072 still reachable from the resulting pointer (even though it
3073 technically cannot point to anything). The last and first
3074 sub-fields are such conservative results.
3075 ??? If we always had a sub-field for &object + 1 then
3076 we could represent this in a more precise way. */
3078 && curr
->offset
< offset
)
3080 temp
= first_or_preceding_vi_for_offset (curr
, offset
);
3082 /* If the found variable is not exactly at the pointed to
3083 result, we have to include the next variable in the
3084 solution as well. Otherwise two increments by offset / 2
3085 do not result in the same or a conservative superset
3087 if (temp
->offset
!= offset
3090 struct constraint_expr c2
;
3091 c2
.var
= temp
->next
;
3092 c2
.type
= ADDRESSOF
;
3094 results
->safe_push (c2
);
3100 c
.offset
= rhsoffset
;
3107 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3108 If address_p is true the result will be taken its address of.
3109 If lhs_p is true then the constraint expression is assumed to be used
3113 get_constraint_for_component_ref (tree t
, vec
<ce_s
> *results
,
3114 bool address_p
, bool lhs_p
)
3117 HOST_WIDE_INT bitsize
= -1;
3118 HOST_WIDE_INT bitmaxsize
= -1;
3119 HOST_WIDE_INT bitpos
;
3122 /* Some people like to do cute things like take the address of
3125 while (handled_component_p (forzero
)
3126 || INDIRECT_REF_P (forzero
)
3127 || TREE_CODE (forzero
) == MEM_REF
)
3128 forzero
= TREE_OPERAND (forzero
, 0);
3130 if (CONSTANT_CLASS_P (forzero
) && integer_zerop (forzero
))
3132 struct constraint_expr temp
;
3135 temp
.var
= integer_id
;
3137 results
->safe_push (temp
);
3141 /* Handle type-punning through unions. If we are extracting a pointer
3142 from a union via a possibly type-punning access that pointer
3143 points to anything, similar to a conversion of an integer to
3149 TREE_CODE (u
) == COMPONENT_REF
|| TREE_CODE (u
) == ARRAY_REF
;
3150 u
= TREE_OPERAND (u
, 0))
3151 if (TREE_CODE (u
) == COMPONENT_REF
3152 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u
, 0))) == UNION_TYPE
)
3154 struct constraint_expr temp
;
3157 temp
.var
= anything_id
;
3158 temp
.type
= ADDRESSOF
;
3159 results
->safe_push (temp
);
3164 t
= get_ref_base_and_extent (t
, &bitpos
, &bitsize
, &bitmaxsize
);
3166 /* Pretend to take the address of the base, we'll take care of
3167 adding the required subset of sub-fields below. */
3168 get_constraint_for_1 (t
, results
, true, lhs_p
);
3169 gcc_assert (results
->length () == 1);
3170 struct constraint_expr
&result
= results
->last ();
3172 if (result
.type
== SCALAR
3173 && get_varinfo (result
.var
)->is_full_var
)
3174 /* For single-field vars do not bother about the offset. */
3176 else if (result
.type
== SCALAR
)
3178 /* In languages like C, you can access one past the end of an
3179 array. You aren't allowed to dereference it, so we can
3180 ignore this constraint. When we handle pointer subtraction,
3181 we may have to do something cute here. */
3183 if ((unsigned HOST_WIDE_INT
)bitpos
< get_varinfo (result
.var
)->fullsize
3186 /* It's also not true that the constraint will actually start at the
3187 right offset, it may start in some padding. We only care about
3188 setting the constraint to the first actual field it touches, so
3190 struct constraint_expr cexpr
= result
;
3194 for (curr
= get_varinfo (cexpr
.var
); curr
; curr
= vi_next (curr
))
3196 if (ranges_overlap_p (curr
->offset
, curr
->size
,
3197 bitpos
, bitmaxsize
))
3199 cexpr
.var
= curr
->id
;
3200 results
->safe_push (cexpr
);
3205 /* If we are going to take the address of this field then
3206 to be able to compute reachability correctly add at least
3207 the last field of the variable. */
3208 if (address_p
&& results
->length () == 0)
3210 curr
= get_varinfo (cexpr
.var
);
3211 while (curr
->next
!= 0)
3212 curr
= vi_next (curr
);
3213 cexpr
.var
= curr
->id
;
3214 results
->safe_push (cexpr
);
3216 else if (results
->length () == 0)
3217 /* Assert that we found *some* field there. The user couldn't be
3218 accessing *only* padding. */
3219 /* Still the user could access one past the end of an array
3220 embedded in a struct resulting in accessing *only* padding. */
3221 /* Or accessing only padding via type-punning to a type
3222 that has a filed just in padding space. */
3224 cexpr
.type
= SCALAR
;
3225 cexpr
.var
= anything_id
;
3227 results
->safe_push (cexpr
);
3230 else if (bitmaxsize
== 0)
3232 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3233 fprintf (dump_file
, "Access to zero-sized part of variable,"
3237 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3238 fprintf (dump_file
, "Access to past the end of variable, ignoring\n");
3240 else if (result
.type
== DEREF
)
3242 /* If we do not know exactly where the access goes say so. Note
3243 that only for non-structure accesses we know that we access
3244 at most one subfiled of any variable. */
3246 || bitsize
!= bitmaxsize
3247 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t
))
3248 || result
.offset
== UNKNOWN_OFFSET
)
3249 result
.offset
= UNKNOWN_OFFSET
;
3251 result
.offset
+= bitpos
;
3253 else if (result
.type
== ADDRESSOF
)
3255 /* We can end up here for component references on a
3256 VIEW_CONVERT_EXPR <>(&foobar). */
3257 result
.type
= SCALAR
;
3258 result
.var
= anything_id
;
3266 /* Dereference the constraint expression CONS, and return the result.
3267 DEREF (ADDRESSOF) = SCALAR
3268 DEREF (SCALAR) = DEREF
3269 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3270 This is needed so that we can handle dereferencing DEREF constraints. */
3273 do_deref (vec
<ce_s
> *constraints
)
3275 struct constraint_expr
*c
;
3278 FOR_EACH_VEC_ELT (*constraints
, i
, c
)
3280 if (c
->type
== SCALAR
)
3282 else if (c
->type
== ADDRESSOF
)
3284 else if (c
->type
== DEREF
)
3286 struct constraint_expr tmplhs
;
3287 tmplhs
= new_scalar_tmp_constraint_exp ("dereftmp");
3288 process_constraint (new_constraint (tmplhs
, *c
));
3289 c
->var
= tmplhs
.var
;
3296 /* Given a tree T, return the constraint expression for taking the
3300 get_constraint_for_address_of (tree t
, vec
<ce_s
> *results
)
3302 struct constraint_expr
*c
;
3305 get_constraint_for_1 (t
, results
, true, true);
3307 FOR_EACH_VEC_ELT (*results
, i
, c
)
3309 if (c
->type
== DEREF
)
3312 c
->type
= ADDRESSOF
;
3316 /* Given a tree T, return the constraint expression for it. */
3319 get_constraint_for_1 (tree t
, vec
<ce_s
> *results
, bool address_p
,
3322 struct constraint_expr temp
;
3324 /* x = integer is all glommed to a single variable, which doesn't
3325 point to anything by itself. That is, of course, unless it is an
3326 integer constant being treated as a pointer, in which case, we
3327 will return that this is really the addressof anything. This
3328 happens below, since it will fall into the default case. The only
3329 case we know something about an integer treated like a pointer is
3330 when it is the NULL pointer, and then we just say it points to
3333 Do not do that if -fno-delete-null-pointer-checks though, because
3334 in that case *NULL does not fail, so it _should_ alias *anything.
3335 It is not worth adding a new option or renaming the existing one,
3336 since this case is relatively obscure. */
3337 if ((TREE_CODE (t
) == INTEGER_CST
3338 && integer_zerop (t
))
3339 /* The only valid CONSTRUCTORs in gimple with pointer typed
3340 elements are zero-initializer. But in IPA mode we also
3341 process global initializers, so verify at least. */
3342 || (TREE_CODE (t
) == CONSTRUCTOR
3343 && CONSTRUCTOR_NELTS (t
) == 0))
3345 if (flag_delete_null_pointer_checks
)
3346 temp
.var
= nothing_id
;
3348 temp
.var
= nonlocal_id
;
3349 temp
.type
= ADDRESSOF
;
3351 results
->safe_push (temp
);
3355 /* String constants are read-only. */
3356 if (TREE_CODE (t
) == STRING_CST
)
3358 temp
.var
= readonly_id
;
3361 results
->safe_push (temp
);
3365 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
3367 case tcc_expression
:
3369 switch (TREE_CODE (t
))
3372 get_constraint_for_address_of (TREE_OPERAND (t
, 0), results
);
3380 switch (TREE_CODE (t
))
3384 struct constraint_expr cs
;
3386 get_constraint_for_ptr_offset (TREE_OPERAND (t
, 0),
3387 TREE_OPERAND (t
, 1), results
);
3390 /* If we are not taking the address then make sure to process
3391 all subvariables we might access. */
3395 cs
= results
->last ();
3396 if (cs
.type
== DEREF
3397 && type_can_have_subvars (TREE_TYPE (t
)))
3399 /* For dereferences this means we have to defer it
3401 results
->last ().offset
= UNKNOWN_OFFSET
;
3404 if (cs
.type
!= SCALAR
)
3407 vi
= get_varinfo (cs
.var
);
3408 curr
= vi_next (vi
);
3409 if (!vi
->is_full_var
3412 unsigned HOST_WIDE_INT size
;
3413 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t
))))
3414 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t
)));
3417 for (; curr
; curr
= vi_next (curr
))
3419 if (curr
->offset
- vi
->offset
< size
)
3422 results
->safe_push (cs
);
3431 case ARRAY_RANGE_REF
:
3433 get_constraint_for_component_ref (t
, results
, address_p
, lhs_p
);
3435 case VIEW_CONVERT_EXPR
:
3436 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, address_p
,
3439 /* We are missing handling for TARGET_MEM_REF here. */
3444 case tcc_exceptional
:
3446 switch (TREE_CODE (t
))
3450 get_constraint_for_ssa_var (t
, results
, address_p
);
3457 vec
<ce_s
> tmp
= vNULL
;
3458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
3460 struct constraint_expr
*rhsp
;
3462 get_constraint_for_1 (val
, &tmp
, address_p
, lhs_p
);
3463 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
3464 results
->safe_push (*rhsp
);
3468 /* We do not know whether the constructor was complete,
3469 so technically we have to add &NOTHING or &ANYTHING
3470 like we do for an empty constructor as well. */
3477 case tcc_declaration
:
3479 get_constraint_for_ssa_var (t
, results
, address_p
);
3484 /* We cannot refer to automatic variables through constants. */
3485 temp
.type
= ADDRESSOF
;
3486 temp
.var
= nonlocal_id
;
3488 results
->safe_push (temp
);
3494 /* The default fallback is a constraint from anything. */
3495 temp
.type
= ADDRESSOF
;
3496 temp
.var
= anything_id
;
3498 results
->safe_push (temp
);
3501 /* Given a gimple tree T, return the constraint expression vector for it. */
3504 get_constraint_for (tree t
, vec
<ce_s
> *results
)
3506 gcc_assert (results
->length () == 0);
3508 get_constraint_for_1 (t
, results
, false, true);
3511 /* Given a gimple tree T, return the constraint expression vector for it
3512 to be used as the rhs of a constraint. */
3515 get_constraint_for_rhs (tree t
, vec
<ce_s
> *results
)
3517 gcc_assert (results
->length () == 0);
3519 get_constraint_for_1 (t
, results
, false, false);
3523 /* Efficiently generates constraints from all entries in *RHSC to all
3524 entries in *LHSC. */
3527 process_all_all_constraints (vec
<ce_s
> lhsc
,
3530 struct constraint_expr
*lhsp
, *rhsp
;
3533 if (lhsc
.length () <= 1 || rhsc
.length () <= 1)
3535 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3536 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
3537 process_constraint (new_constraint (*lhsp
, *rhsp
));
3541 struct constraint_expr tmp
;
3542 tmp
= new_scalar_tmp_constraint_exp ("allalltmp");
3543 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
3544 process_constraint (new_constraint (tmp
, *rhsp
));
3545 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3546 process_constraint (new_constraint (*lhsp
, tmp
));
3550 /* Handle aggregate copies by expanding into copies of the respective
3551 fields of the structures. */
3554 do_structure_copy (tree lhsop
, tree rhsop
)
3556 struct constraint_expr
*lhsp
, *rhsp
;
3557 vec
<ce_s
> lhsc
= vNULL
;
3558 vec
<ce_s
> rhsc
= vNULL
;
3561 get_constraint_for (lhsop
, &lhsc
);
3562 get_constraint_for_rhs (rhsop
, &rhsc
);
3565 if (lhsp
->type
== DEREF
3566 || (lhsp
->type
== ADDRESSOF
&& lhsp
->var
== anything_id
)
3567 || rhsp
->type
== DEREF
)
3569 if (lhsp
->type
== DEREF
)
3571 gcc_assert (lhsc
.length () == 1);
3572 lhsp
->offset
= UNKNOWN_OFFSET
;
3574 if (rhsp
->type
== DEREF
)
3576 gcc_assert (rhsc
.length () == 1);
3577 rhsp
->offset
= UNKNOWN_OFFSET
;
3579 process_all_all_constraints (lhsc
, rhsc
);
3581 else if (lhsp
->type
== SCALAR
3582 && (rhsp
->type
== SCALAR
3583 || rhsp
->type
== ADDRESSOF
))
3585 HOST_WIDE_INT lhssize
, lhsmaxsize
, lhsoffset
;
3586 HOST_WIDE_INT rhssize
, rhsmaxsize
, rhsoffset
;
3588 get_ref_base_and_extent (lhsop
, &lhsoffset
, &lhssize
, &lhsmaxsize
);
3589 get_ref_base_and_extent (rhsop
, &rhsoffset
, &rhssize
, &rhsmaxsize
);
3590 for (j
= 0; lhsc
.iterate (j
, &lhsp
);)
3592 varinfo_t lhsv
, rhsv
;
3594 lhsv
= get_varinfo (lhsp
->var
);
3595 rhsv
= get_varinfo (rhsp
->var
);
3596 if (lhsv
->may_have_pointers
3597 && (lhsv
->is_full_var
3598 || rhsv
->is_full_var
3599 || ranges_overlap_p (lhsv
->offset
+ rhsoffset
, lhsv
->size
,
3600 rhsv
->offset
+ lhsoffset
, rhsv
->size
)))
3601 process_constraint (new_constraint (*lhsp
, *rhsp
));
3602 if (!rhsv
->is_full_var
3603 && (lhsv
->is_full_var
3604 || (lhsv
->offset
+ rhsoffset
+ lhsv
->size
3605 > rhsv
->offset
+ lhsoffset
+ rhsv
->size
)))
3608 if (k
>= rhsc
.length ())
3622 /* Create constraints ID = { rhsc }. */
3625 make_constraints_to (unsigned id
, vec
<ce_s
> rhsc
)
3627 struct constraint_expr
*c
;
3628 struct constraint_expr includes
;
3632 includes
.offset
= 0;
3633 includes
.type
= SCALAR
;
3635 FOR_EACH_VEC_ELT (rhsc
, j
, c
)
3636 process_constraint (new_constraint (includes
, *c
));
3639 /* Create a constraint ID = OP. */
3642 make_constraint_to (unsigned id
, tree op
)
3644 vec
<ce_s
> rhsc
= vNULL
;
3645 get_constraint_for_rhs (op
, &rhsc
);
3646 make_constraints_to (id
, rhsc
);
3650 /* Create a constraint ID = &FROM. */
3653 make_constraint_from (varinfo_t vi
, int from
)
3655 struct constraint_expr lhs
, rhs
;
3663 rhs
.type
= ADDRESSOF
;
3664 process_constraint (new_constraint (lhs
, rhs
));
3667 /* Create a constraint ID = FROM. */
3670 make_copy_constraint (varinfo_t vi
, int from
)
3672 struct constraint_expr lhs
, rhs
;
3681 process_constraint (new_constraint (lhs
, rhs
));
3684 /* Make constraints necessary to make OP escape. */
3687 make_escape_constraint (tree op
)
3689 make_constraint_to (escaped_id
, op
);
3692 /* Add constraints to that the solution of VI is transitively closed. */
3695 make_transitive_closure_constraints (varinfo_t vi
)
3697 struct constraint_expr lhs
, rhs
;
3706 process_constraint (new_constraint (lhs
, rhs
));
3708 /* VAR = VAR + UNKNOWN; */
3714 rhs
.offset
= UNKNOWN_OFFSET
;
3715 process_constraint (new_constraint (lhs
, rhs
));
3718 /* Temporary storage for fake var decls. */
3719 struct obstack fake_var_decl_obstack
;
3721 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3724 build_fake_var_decl (tree type
)
3726 tree decl
= (tree
) XOBNEW (&fake_var_decl_obstack
, struct tree_var_decl
);
3727 memset (decl
, 0, sizeof (struct tree_var_decl
));
3728 TREE_SET_CODE (decl
, VAR_DECL
);
3729 TREE_TYPE (decl
) = type
;
3730 DECL_UID (decl
) = allocate_decl_uid ();
3731 SET_DECL_PT_UID (decl
, -1);
3732 layout_decl (decl
, 0);
3736 /* Create a new artificial heap variable with NAME.
3737 Return the created variable. */
3740 make_heapvar (const char *name
)
3745 heapvar
= build_fake_var_decl (ptr_type_node
);
3746 DECL_EXTERNAL (heapvar
) = 1;
3748 vi
= new_var_info (heapvar
, name
);
3749 vi
->is_artificial_var
= true;
3750 vi
->is_heap_var
= true;
3751 vi
->is_unknown_size_var
= true;
3755 vi
->is_full_var
= true;
3756 insert_vi_for_tree (heapvar
, vi
);
3761 /* Create a new artificial heap variable with NAME and make a
3762 constraint from it to LHS. Set flags according to a tag used
3763 for tracking restrict pointers. */
3766 make_constraint_from_restrict (varinfo_t lhs
, const char *name
)
3768 varinfo_t vi
= make_heapvar (name
);
3769 vi
->is_global_var
= 1;
3770 vi
->may_have_pointers
= 1;
3771 make_constraint_from (lhs
, vi
->id
);
3775 /* Create a new artificial heap variable with NAME and make a
3776 constraint from it to LHS. Set flags according to a tag used
3777 for tracking restrict pointers and make the artificial heap
3778 point to global memory. */
3781 make_constraint_from_global_restrict (varinfo_t lhs
, const char *name
)
3783 varinfo_t vi
= make_constraint_from_restrict (lhs
, name
);
3784 make_copy_constraint (vi
, nonlocal_id
);
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3797 /* Get a constraint for the requested part of a function designator FI
3798 when operating in IPA mode. */
3800 static struct constraint_expr
3801 get_function_part_constraint (varinfo_t fi
, unsigned part
)
3803 struct constraint_expr c
;
/* Only meaningful in IPA mode, where per-function sub-variables exist.  */
3805 gcc_assert (in_ipa_mode
);
3807 if (fi
->id
== anything_id
)
3809 /* ??? We probably should have a ANYFN special variable. */
3810 c
.var
= anything_id
;
/* For a real FUNCTION_DECL, look up the sub-variable at offset PART.  */
3814 else if (TREE_CODE (fi
->decl
) == FUNCTION_DECL
)
3816 varinfo_t ai
= first_vi_for_offset (fi
, part
);
/* Fallback: the part could not be resolved, use ANYTHING.  */
3820 c
.var
= anything_id
;
3834 /* For non-IPA mode, generate constraints necessary for a call on the
3838 handle_rhs_call (gimple stmt
, vec
<ce_s
> *results
)
3840 struct constraint_expr rhsc
;
3842 bool returns_uses
= false;
/* Scan each argument and model its escape/use behavior according to
   its EAF_* flags.  */
3844 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3846 tree arg
= gimple_call_arg (stmt
, i
);
3847 int flags
= gimple_call_arg_flags (stmt
, i
);
3849 /* If the argument is not used we can ignore it. */
3850 if (flags
& EAF_UNUSED
)
3853 /* As we compute ESCAPED context-insensitive we do not gain
3854 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3855 set. The argument would still get clobbered through the
3857 if ((flags
& EAF_NOCLOBBER
)
3858 && (flags
& EAF_NOESCAPE
))
/* Not clobbered and not escaping: the argument is only call-used.
   For an indirect use, route it through a temporary with a
   transitive closure so everything reachable is included.  */
3860 varinfo_t uses
= get_call_use_vi (stmt
);
3861 if (!(flags
& EAF_DIRECT
))
3863 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3864 make_constraint_to (tem
->id
, arg
);
3865 make_transitive_closure_constraints (tem
);
3866 make_copy_constraint (uses
, tem
->id
);
3869 make_constraint_to (uses
->id
, arg
);
3870 returns_uses
= true;
3872 else if (flags
& EAF_NOESCAPE
)
/* Clobbered but not escaping: reachable memory is both
   call-used and call-clobbered.  */
3874 struct constraint_expr lhs
, rhs
;
3875 varinfo_t uses
= get_call_use_vi (stmt
);
3876 varinfo_t clobbers
= get_call_clobber_vi (stmt
);
3877 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3878 make_constraint_to (tem
->id
, arg
);
3879 if (!(flags
& EAF_DIRECT
))
3880 make_transitive_closure_constraints (tem
);
3881 make_copy_constraint (uses
, tem
->id
);
3882 make_copy_constraint (clobbers
, tem
->id
);
3883 /* Add *tem = nonlocal, do not add *tem = callused as
3884 EAF_NOESCAPE parameters do not escape to other parameters
3885 and all other uses appear in NONLOCAL as well. */
3890 rhs
.var
= nonlocal_id
;
3892 process_constraint (new_constraint (lhs
, rhs
));
3893 returns_uses
= true;
/* No helpful flags: the argument escapes fully.  */
3896 make_escape_constraint (arg
);
3899 /* If we added to the calls uses solution make sure we account for
3900 pointers to it to be returned. */
3903 rhsc
.var
= get_call_use_vi (stmt
)->id
;
3906 results
->safe_push (rhsc
);
3909 /* The static chain escapes as well. */
3910 if (gimple_call_chain (stmt
))
3911 make_escape_constraint (gimple_call_chain (stmt
));
3913 /* And if we applied NRV the address of the return slot escapes as well. */
3914 if (gimple_call_return_slot_opt_p (stmt
)
3915 && gimple_call_lhs (stmt
) != NULL_TREE
3916 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
3918 vec
<ce_s
> tmpc
= vNULL
;
3919 struct constraint_expr lhsc
, *c
;
3920 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
3921 lhsc
.var
= escaped_id
;
3924 FOR_EACH_VEC_ELT (tmpc
, i
, c
)
3925 process_constraint (new_constraint (lhsc
, *c
));
3929 /* Regular functions return nonlocal memory. */
3930 rhsc
.var
= nonlocal_id
;
3933 results
->safe_push (rhsc
);
3936 /* For non-IPA mode, generate constraints necessary for a call
3937 that returns a pointer and assigns it to LHS. This simply makes
3938 the LHS point to global and escaped variables. */
3941 handle_lhs_call (gimple stmt
, tree lhs
, int flags
, vec
<ce_s
> rhsc
,
3944 vec
<ce_s
> lhsc
= vNULL
;
3946 get_constraint_for (lhs
, &lhsc
);
3947 /* If the store is to a global decl make sure to
3948 add proper escape constraints. */
3949 lhs
= get_base_address (lhs
);
3952 && is_global_var (lhs
))
3954 struct constraint_expr tmpc
;
3955 tmpc
.var
= escaped_id
;
3958 lhsc
.safe_push (tmpc
);
3961 /* If the call returns an argument unmodified override the rhs
3963 flags
= gimple_call_return_flags (stmt
);
3964 if (flags
& ERF_RETURNS_ARG
3965 && (flags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (stmt
))
/* ERF_RETURNS_ARG: the return value aliases the indicated argument,
   so use that argument's constraints as the rhs instead.  */
3969 arg
= gimple_call_arg (stmt
, flags
& ERF_RETURN_ARG_MASK
);
3970 get_constraint_for (arg
, &rhsc
);
3971 process_all_all_constraints (lhsc
, rhsc
);
3974 else if (flags
& ERF_NOALIAS
)
/* ERF_NOALIAS (malloc-like): the result points to a fresh,
   initially local heap variable.  */
3977 struct constraint_expr tmpc
;
3979 vi
= make_heapvar ("HEAP");
3980 /* We delay marking allocated storage global until we know if
3982 DECL_EXTERNAL (vi
->decl
) = 0;
3983 vi
->is_global_var
= 0;
3984 /* If this is not a real malloc call assume the memory was
3985 initialized and thus may point to global memory. All
3986 builtin functions with the malloc attribute behave in a sane way. */
3988 || DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_NORMAL
)
3989 make_constraint_from (vi
, nonlocal_id
);
3992 tmpc
.type
= ADDRESSOF
;
3993 rhsc
.safe_push (tmpc
);
3994 process_all_all_constraints (lhsc
, rhsc
);
/* Default: the LHS points to whatever the caller-provided RHSC says
   (globals/escaped memory as set up by the rhs-call handling).  */
3998 process_all_all_constraints (lhsc
, rhsc
);
4003 /* For non-IPA mode, generate constraints necessary for a call of a
4004 const function that returns a pointer in the statement STMT. */
4007 handle_const_call (gimple stmt
, vec
<ce_s
> *results
)
4009 struct constraint_expr rhsc
;
4012 /* Treat nested const functions the same as pure functions as far
4013 as the static chain is concerned. */
4014 if (gimple_call_chain (stmt
))
4016 varinfo_t uses
= get_call_use_vi (stmt
);
4017 make_transitive_closure_constraints (uses
);
4018 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
/* The result may point into memory reachable from the static chain.  */
4019 rhsc
.var
= uses
->id
;
4022 results
->safe_push (rhsc
);
4025 /* May return arguments. */
4026 for (k
= 0; k
< gimple_call_num_args (stmt
); ++k
)
4028 tree arg
= gimple_call_arg (stmt
, k
);
4029 vec
<ce_s
> argc
= vNULL
;
4031 struct constraint_expr
*argp
;
4032 get_constraint_for_rhs (arg
, &argc
);
4033 FOR_EACH_VEC_ELT (argc
, i
, argp
)
4034 results
->safe_push (*argp
);
4038 /* May return addresses of globals. */
4039 rhsc
.var
= nonlocal_id
;
4041 rhsc
.type
= ADDRESSOF
;
4042 results
->safe_push (rhsc
);
4045 /* For non-IPA mode, generate constraints necessary for a call to a
4046 pure function in statement STMT. */
4049 handle_pure_call (gimple stmt
, vec
<ce_s
> *results
)
4051 struct constraint_expr rhsc
;
4053 varinfo_t uses
= NULL
;
4055 /* Memory reached from pointer arguments is call-used. */
4056 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4058 tree arg
= gimple_call_arg (stmt
, i
);
/* Lazily create the call-use variable with a transitive closure
   so everything reachable from ARG is included.  */
4061 uses
= get_call_use_vi (stmt
);
4062 make_transitive_closure_constraints (uses
);
4064 make_constraint_to (uses
->id
, arg
);
4067 /* The static chain is used as well. */
4068 if (gimple_call_chain (stmt
))
4072 uses
= get_call_use_vi (stmt
);
4073 make_transitive_closure_constraints (uses
);
4075 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4078 /* Pure functions may return call-used and nonlocal memory. */
4081 rhsc
.var
= uses
->id
;
4084 results
->safe_push (rhsc
);
4086 rhsc
.var
= nonlocal_id
;
4089 results
->safe_push (rhsc
);
4093 /* Return the varinfo for the callee of CALL. */
4096 get_fi_for_callee (gimple call
)
4098 tree decl
, fn
= gimple_call_fn (call
);
/* Look through OBJ_TYPE_REF wrappers (virtual call expressions).  */
4100 if (fn
&& TREE_CODE (fn
) == OBJ_TYPE_REF
)
4101 fn
= OBJ_TYPE_REF_EXPR (fn
);
4103 /* If we can directly resolve the function being called, do so.
4104 Otherwise, it must be some sort of indirect expression that
4105 we should still be able to handle. */
4106 decl
= gimple_call_addr_fndecl (fn
);
4108 return get_vi_for_tree (decl
);
4110 /* If the function is anything other than a SSA name pointer we have no
4111 clue and should be getting ANYFN (well, ANYTHING for now). */
4112 if (!fn
|| TREE_CODE (fn
) != SSA_NAME
)
4113 return get_varinfo (anything_id
);
/* For a default-def SSA name of a PARM_DECL or RESULT_DECL, use the
   underlying decl's varinfo instead of the SSA name's.  */
4115 if (SSA_NAME_IS_DEFAULT_DEF (fn
)
4116 && (TREE_CODE (SSA_NAME_VAR (fn
)) == PARM_DECL
4117 || TREE_CODE (SSA_NAME_VAR (fn
)) == RESULT_DECL
))
4118 fn
= SSA_NAME_VAR (fn
);
4120 return get_vi_for_tree (fn
);
4123 /* Create constraints for the builtin call T. Return true if the call
4124 was handled, otherwise false. */
4127 find_func_aliases_for_builtin_call (gimple t
)
4129 tree fndecl
= gimple_call_fndecl (t
);
4130 vec
<ce_s
> lhsc
= vNULL
;
4131 vec
<ce_s
> rhsc
= vNULL
;
/* Dispatch on the builtin's function code.  */
4134 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4135 /* ??? All builtins that are handled here need to be handled
4136 in the alias-oracle query functions explicitly! */
4137 switch (DECL_FUNCTION_CODE (fndecl
))
4139 /* All the following functions return a pointer to the same object
4140 as their first argument points to. The functions do not add
4141 to the ESCAPED solution. The functions make the first argument
4142 pointed to memory point to what the second argument pointed to
4143 memory points to. */
4144 case BUILT_IN_STRCPY
:
4145 case BUILT_IN_STRNCPY
:
4146 case BUILT_IN_BCOPY
:
4147 case BUILT_IN_MEMCPY
:
4148 case BUILT_IN_MEMMOVE
:
4149 case BUILT_IN_MEMPCPY
:
4150 case BUILT_IN_STPCPY
:
4151 case BUILT_IN_STPNCPY
:
4152 case BUILT_IN_STRCAT
:
4153 case BUILT_IN_STRNCAT
:
4154 case BUILT_IN_STRCPY_CHK
:
4155 case BUILT_IN_STRNCPY_CHK
:
4156 case BUILT_IN_MEMCPY_CHK
:
4157 case BUILT_IN_MEMMOVE_CHK
:
4158 case BUILT_IN_MEMPCPY_CHK
:
4159 case BUILT_IN_STPCPY_CHK
:
4160 case BUILT_IN_STPNCPY_CHK
:
4161 case BUILT_IN_STRCAT_CHK
:
4162 case BUILT_IN_STRNCAT_CHK
:
4163 case BUILT_IN_TM_MEMCPY
:
4164 case BUILT_IN_TM_MEMMOVE
:
/* Note bcopy's argument order is (src, dest), the reverse of memcpy.  */
4166 tree res
= gimple_call_lhs (t
);
4167 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4168 == BUILT_IN_BCOPY
? 1 : 0));
4169 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4170 == BUILT_IN_BCOPY
? 0 : 1));
4171 if (res
!= NULL_TREE
)
4173 get_constraint_for (res
, &lhsc
);
/* mempcpy/stpcpy-style builtins return DEST plus an offset, the
   others return DEST itself.  */
4174 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY
4175 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY
4176 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY
4177 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY_CHK
4178 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY_CHK
4179 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY_CHK
)
4180 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &rhsc
);
4182 get_constraint_for (dest
, &rhsc
);
4183 process_all_all_constraints (lhsc
, rhsc
);
/* Model *dest = *src for the copied memory.  */
4187 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4188 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4191 process_all_all_constraints (lhsc
, rhsc
);
4196 case BUILT_IN_MEMSET
:
4197 case BUILT_IN_MEMSET_CHK
:
4198 case BUILT_IN_TM_MEMSET
:
4200 tree res
= gimple_call_lhs (t
);
4201 tree dest
= gimple_call_arg (t
, 0);
4204 struct constraint_expr ac
;
4205 if (res
!= NULL_TREE
)
4207 get_constraint_for (res
, &lhsc
);
4208 get_constraint_for (dest
, &rhsc
);
4209 process_all_all_constraints (lhsc
, rhsc
);
4213 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
/* memset with zero writes NULL into the destination (when NULL
   dereference is undefined); otherwise treat the written value as
   an unknown integer.  */
4215 if (flag_delete_null_pointer_checks
4216 && integer_zerop (gimple_call_arg (t
, 1)))
4218 ac
.type
= ADDRESSOF
;
4219 ac
.var
= nothing_id
;
4224 ac
.var
= integer_id
;
4227 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4228 process_constraint (new_constraint (*lhsp
, ac
));
4232 case BUILT_IN_ASSUME_ALIGNED
:
/* __builtin_assume_aligned returns its first argument unchanged.  */
4234 tree res
= gimple_call_lhs (t
);
4235 tree dest
= gimple_call_arg (t
, 0);
4236 if (res
!= NULL_TREE
)
4238 get_constraint_for (res
, &lhsc
);
4239 get_constraint_for (dest
, &rhsc
);
4240 process_all_all_constraints (lhsc
, rhsc
);
4246 /* All the following functions do not return pointers, do not
4247 modify the points-to sets of memory reachable from their
4248 arguments and do not add to the ESCAPED solution. */
4249 case BUILT_IN_SINCOS
:
4250 case BUILT_IN_SINCOSF
:
4251 case BUILT_IN_SINCOSL
:
4252 case BUILT_IN_FREXP
:
4253 case BUILT_IN_FREXPF
:
4254 case BUILT_IN_FREXPL
:
4255 case BUILT_IN_GAMMA_R
:
4256 case BUILT_IN_GAMMAF_R
:
4257 case BUILT_IN_GAMMAL_R
:
4258 case BUILT_IN_LGAMMA_R
:
4259 case BUILT_IN_LGAMMAF_R
:
4260 case BUILT_IN_LGAMMAL_R
:
4262 case BUILT_IN_MODFF
:
4263 case BUILT_IN_MODFL
:
4264 case BUILT_IN_REMQUO
:
4265 case BUILT_IN_REMQUOF
:
4266 case BUILT_IN_REMQUOL
:
4269 case BUILT_IN_STRDUP
:
4270 case BUILT_IN_STRNDUP
:
/* strdup/strndup: model like a malloc-returning call whose fresh
   memory then receives the contents of the source string.  */
4271 if (gimple_call_lhs (t
))
4273 handle_lhs_call (t
, gimple_call_lhs (t
), gimple_call_flags (t
),
4275 get_constraint_for_ptr_offset (gimple_call_lhs (t
),
4277 get_constraint_for_ptr_offset (gimple_call_arg (t
, 0),
4281 process_all_all_constraints (lhsc
, rhsc
);
4287 /* String / character search functions return a pointer into the
4288 source string or NULL. */
4289 case BUILT_IN_INDEX
:
4290 case BUILT_IN_STRCHR
:
4291 case BUILT_IN_STRRCHR
:
4292 case BUILT_IN_MEMCHR
:
4293 case BUILT_IN_STRSTR
:
4294 case BUILT_IN_STRPBRK
:
4295 if (gimple_call_lhs (t
))
4297 tree src
= gimple_call_arg (t
, 0);
4298 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
/* Also include NULL (&NOTHING) as a possible result.  */
4299 constraint_expr nul
;
4300 nul
.var
= nothing_id
;
4302 nul
.type
= ADDRESSOF
;
4303 rhsc
.safe_push (nul
);
4304 get_constraint_for (gimple_call_lhs (t
), &lhsc
);
4305 process_all_all_constraints (lhsc
, rhsc
);
4310 /* Trampolines are special - they set up passing the static
4312 case BUILT_IN_INIT_TRAMPOLINE
:
4314 tree tramp
= gimple_call_arg (t
, 0);
4315 tree nfunc
= gimple_call_arg (t
, 1);
4316 tree frame
= gimple_call_arg (t
, 2);
4318 struct constraint_expr lhs
, *rhsp
;
/* In IPA mode wire the frame into the nested function's static
   chain sub-variable.  */
4321 varinfo_t nfi
= NULL
;
4322 gcc_assert (TREE_CODE (nfunc
) == ADDR_EXPR
);
4323 nfi
= lookup_vi_for_tree (TREE_OPERAND (nfunc
, 0));
4326 lhs
= get_function_part_constraint (nfi
, fi_static_chain
);
4327 get_constraint_for (frame
, &rhsc
);
4328 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4329 process_constraint (new_constraint (lhs
, *rhsp
));
4332 /* Make the frame point to the function for
4333 the trampoline adjustment call. */
4334 get_constraint_for (tramp
, &lhsc
);
4336 get_constraint_for (nfunc
, &rhsc
);
4337 process_all_all_constraints (lhsc
, rhsc
);
4344 /* Else fallthru to generic handling which will let
4345 the frame escape. */
4348 case BUILT_IN_ADJUST_TRAMPOLINE
:
4350 tree tramp
= gimple_call_arg (t
, 0);
4351 tree res
= gimple_call_lhs (t
);
4352 if (in_ipa_mode
&& res
)
4354 get_constraint_for (res
, &lhsc
);
4355 get_constraint_for (tramp
, &rhsc
);
4357 process_all_all_constraints (lhsc
, rhsc
);
4363 CASE_BUILT_IN_TM_STORE (1):
4364 CASE_BUILT_IN_TM_STORE (2):
4365 CASE_BUILT_IN_TM_STORE (4):
4366 CASE_BUILT_IN_TM_STORE (8):
4367 CASE_BUILT_IN_TM_STORE (FLOAT
):
4368 CASE_BUILT_IN_TM_STORE (DOUBLE
):
4369 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
4370 CASE_BUILT_IN_TM_STORE (M64
):
4371 CASE_BUILT_IN_TM_STORE (M128
):
4372 CASE_BUILT_IN_TM_STORE (M256
):
/* Transactional-memory store: model as *addr = src.  */
4374 tree addr
= gimple_call_arg (t
, 0);
4375 tree src
= gimple_call_arg (t
, 1);
4377 get_constraint_for (addr
, &lhsc
);
4379 get_constraint_for (src
, &rhsc
);
4380 process_all_all_constraints (lhsc
, rhsc
);
4385 CASE_BUILT_IN_TM_LOAD (1):
4386 CASE_BUILT_IN_TM_LOAD (2):
4387 CASE_BUILT_IN_TM_LOAD (4):
4388 CASE_BUILT_IN_TM_LOAD (8):
4389 CASE_BUILT_IN_TM_LOAD (FLOAT
):
4390 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
4391 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
4392 CASE_BUILT_IN_TM_LOAD (M64
):
4393 CASE_BUILT_IN_TM_LOAD (M128
):
4394 CASE_BUILT_IN_TM_LOAD (M256
):
/* Transactional-memory load: model as dest = *addr.  */
4396 tree dest
= gimple_call_lhs (t
);
4397 tree addr
= gimple_call_arg (t
, 0);
4399 get_constraint_for (dest
, &lhsc
);
4400 get_constraint_for (addr
, &rhsc
);
4402 process_all_all_constraints (lhsc
, rhsc
);
4407 /* Variadic argument handling needs to be handled in IPA
4409 case BUILT_IN_VA_START
:
4411 tree valist
= gimple_call_arg (t
, 0);
4412 struct constraint_expr rhs
, *lhsp
;
4414 get_constraint_for (valist
, &lhsc
);
4416 /* The va_list gets access to pointers in variadic
4417 arguments. Which we know in the case of IPA analysis
4418 and otherwise are just all nonlocal variables. */
4421 fi
= lookup_vi_for_tree (cfun
->decl
);
4422 rhs
= get_function_part_constraint (fi
, ~0);
4423 rhs
.type
= ADDRESSOF
;
4427 rhs
.var
= nonlocal_id
;
4428 rhs
.type
= ADDRESSOF
;
4431 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4432 process_constraint (new_constraint (*lhsp
, rhs
));
4434 /* va_list is clobbered. */
4435 make_constraint_to (get_call_clobber_vi (t
)->id
, valist
);
4438 /* va_end doesn't have any effect that matters. */
4439 case BUILT_IN_VA_END
:
4441 /* Alternate return. Simply give up for now. */
4442 case BUILT_IN_RETURN
:
/* Conservatively let everything escape if we cannot attribute the
   alternate return to the current function's result.  */
4446 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4447 make_constraint_from (get_varinfo (escaped_id
), anything_id
);
4448 else if (in_ipa_mode
4451 struct constraint_expr lhs
, rhs
;
4452 lhs
= get_function_part_constraint (fi
, fi_result
);
4453 rhs
.var
= anything_id
;
4456 process_constraint (new_constraint (lhs
, rhs
));
4460 /* printf-style functions may have hooks to set pointers to
4461 point to somewhere into the generated string. Leave them
4462 for a later exercise... */
4464 /* Fallthru to general call handling. */;
4470 /* Create constraints for the call T. */
4473 find_func_aliases_for_call (gimple t
)
4475 tree fndecl
= gimple_call_fndecl (t
);
4476 vec
<ce_s
> lhsc
= vNULL
;
4477 vec
<ce_s
> rhsc
= vNULL
;
/* Builtins get specialized handling first; if handled, nothing
   more to do here.  */
4480 if (fndecl
!= NULL_TREE
4481 && DECL_BUILT_IN (fndecl
)
4482 && find_func_aliases_for_builtin_call (t
))
4485 fi
= get_fi_for_callee (t
);
/* Non-IPA path (no usable function info): model the call through
   its ECF_* flags.  */
4487 || (fndecl
&& !fi
->is_fn_info
))
4489 vec
<ce_s
> rhsc
= vNULL
;
4490 int flags
= gimple_call_flags (t
);
4492 /* Const functions can return their arguments and addresses
4493 of global memory but not of escaped memory. */
4494 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
4496 if (gimple_call_lhs (t
))
4497 handle_const_call (t
, &rhsc
);
4499 /* Pure functions can return addresses in and of memory
4500 reachable from their arguments, but they are not an escape
4501 point for reachable memory of their arguments. */
4502 else if (flags
& (ECF_PURE
|ECF_LOOPING_CONST_OR_PURE
))
4503 handle_pure_call (t
, &rhsc
);
4505 handle_rhs_call (t
, &rhsc
);
4506 if (gimple_call_lhs (t
))
4507 handle_lhs_call (t
, gimple_call_lhs (t
), flags
, rhsc
, fndecl
);
4515 /* Assign all the passed arguments to the appropriate incoming
4516 parameters of the function. */
4517 for (j
= 0; j
< gimple_call_num_args (t
); j
++)
4519 struct constraint_expr lhs
;
4520 struct constraint_expr
*rhsp
;
4521 tree arg
= gimple_call_arg (t
, j
);
4523 get_constraint_for_rhs (arg
, &rhsc
);
4524 lhs
= get_function_part_constraint (fi
, fi_parm_base
+ j
);
/* Drain RHSC so it is empty for the next argument.  */
4525 while (rhsc
.length () != 0)
4527 rhsp
= &rhsc
.last ();
4528 process_constraint (new_constraint (lhs
, *rhsp
));
4533 /* If we are returning a value, assign it to the result. */
4534 lhsop
= gimple_call_lhs (t
);
4537 struct constraint_expr rhs
;
4538 struct constraint_expr
*lhsp
;
4540 get_constraint_for (lhsop
, &lhsc
);
4541 rhs
= get_function_part_constraint (fi
, fi_result
);
/* A DECL_BY_REFERENCE result is returned through a pointer, so
   the result constraint must be dereferenced first.  */
4543 && DECL_RESULT (fndecl
)
4544 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4546 vec
<ce_s
> tem
= vNULL
;
4547 tem
.safe_push (rhs
);
4552 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4553 process_constraint (new_constraint (*lhsp
, rhs
));
4556 /* If we pass the result decl by reference, honor that. */
4559 && DECL_RESULT (fndecl
)
4560 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4562 struct constraint_expr lhs
;
4563 struct constraint_expr
*rhsp
;
4565 get_constraint_for_address_of (lhsop
, &rhsc
);
4566 lhs
= get_function_part_constraint (fi
, fi_result
);
4567 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4568 process_constraint (new_constraint (lhs
, *rhsp
));
4572 /* If we use a static chain, pass it along. */
4573 if (gimple_call_chain (t
))
4575 struct constraint_expr lhs
;
4576 struct constraint_expr
*rhsp
;
4578 get_constraint_for (gimple_call_chain (t
), &rhsc
);
4579 lhs
= get_function_part_constraint (fi
, fi_static_chain
);
4580 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4581 process_constraint (new_constraint (lhs
, *rhsp
));
4586 /* Walk statement T setting up aliasing constraints according to the
4587 references found in T. This function is the main part of the
4588 constraint builder. AI points to auxiliary alias information used
4589 when building alias sets and computing alias grouping heuristics. */
4592 find_func_aliases (gimple origt
)
4595 vec
<ce_s
> lhsc
= vNULL
;
4596 vec
<ce_s
> rhsc
= vNULL
;
4597 struct constraint_expr
*c
;
4600 /* Now build constraints expressions. */
4601 if (gimple_code (t
) == GIMPLE_PHI
)
4606 /* For a phi node, assign all the arguments to
4608 get_constraint_for (gimple_phi_result (t
), &lhsc
);
4609 for (i
= 0; i
< gimple_phi_num_args (t
); i
++)
4611 tree strippedrhs
= PHI_ARG_DEF (t
, i
);
4613 STRIP_NOPS (strippedrhs
);
4614 get_constraint_for_rhs (gimple_phi_arg_def (t
, i
), &rhsc
);
4616 FOR_EACH_VEC_ELT (lhsc
, j
, c
)
4618 struct constraint_expr
*c2
;
/* Pair every result constraint with every argument constraint.  */
4619 while (rhsc
.length () > 0)
4622 process_constraint (new_constraint (*c
, *c2
));
4628 /* In IPA mode, we need to generate constraints to pass call
4629 arguments through their calls. There are two cases,
4630 either a GIMPLE_CALL returning a value, or just a plain
4631 GIMPLE_CALL when we are not.
4633 In non-ipa mode, we need to generate constraints for each
4634 pointer passed by address. */
4635 else if (is_gimple_call (t
))
4636 find_func_aliases_for_call (t
);
4638 /* Otherwise, just a regular assignment statement. Only care about
4639 operations with pointer result, others are dealt with as escape
4640 points if they have pointer operands. */
4641 else if (is_gimple_assign (t
))
4643 /* Otherwise, just a regular assignment statement. */
4644 tree lhsop
= gimple_assign_lhs (t
);
4645 tree rhsop
= (gimple_num_ops (t
) == 2) ? gimple_assign_rhs1 (t
) : NULL
;
4647 if (rhsop
&& TREE_CLOBBER_P (rhsop
))
4648 /* Ignore clobbers, they don't actually store anything into
4651 else if (rhsop
&& AGGREGATE_TYPE_P (TREE_TYPE (lhsop
)))
4652 do_structure_copy (lhsop
, rhsop
);
4655 enum tree_code code
= gimple_assign_rhs_code (t
);
4657 get_constraint_for (lhsop
, &lhsc
);
4659 if (FLOAT_TYPE_P (TREE_TYPE (lhsop
)))
4660 /* If the operation produces a floating point result then
4661 assume the value is not produced to transfer a pointer. */
4663 else if (code
== POINTER_PLUS_EXPR
)
4664 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4665 gimple_assign_rhs2 (t
), &rhsc
);
4666 else if (code
== BIT_AND_EXPR
4667 && TREE_CODE (gimple_assign_rhs2 (t
)) == INTEGER_CST
)
4669 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4670 the pointer. Handle it by offsetting it by UNKNOWN. */
4671 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
/* Pointer-preserving conversions and plain copies transfer the rhs
   constraints unchanged.  */
4674 else if ((CONVERT_EXPR_CODE_P (code
)
4675 && !(POINTER_TYPE_P (gimple_expr_type (t
))
4676 && !POINTER_TYPE_P (TREE_TYPE (rhsop
))))
4677 || gimple_assign_single_p (t
))
4678 get_constraint_for_rhs (rhsop
, &rhsc
);
4679 else if (code
== COND_EXPR
)
4681 /* The result is a merge of both COND_EXPR arms. */
4682 vec
<ce_s
> tmp
= vNULL
;
4683 struct constraint_expr
*rhsp
;
4685 get_constraint_for_rhs (gimple_assign_rhs2 (t
), &rhsc
);
4686 get_constraint_for_rhs (gimple_assign_rhs3 (t
), &tmp
);
4687 FOR_EACH_VEC_ELT (tmp
, i
, rhsp
)
4688 rhsc
.safe_push (*rhsp
);
4691 else if (truth_value_p (code
))
4692 /* Truth value results are not pointer (parts). Or at least
4693 very very unreasonable obfuscation of a part. */
4697 /* All other operations are merges. */
4698 vec
<ce_s
> tmp
= vNULL
;
4699 struct constraint_expr
*rhsp
;
4701 get_constraint_for_rhs (gimple_assign_rhs1 (t
), &rhsc
);
4702 for (i
= 2; i
< gimple_num_ops (t
); ++i
)
4704 get_constraint_for_rhs (gimple_op (t
, i
), &tmp
);
4705 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
4706 rhsc
.safe_push (*rhsp
);
4711 process_all_all_constraints (lhsc
, rhsc
);
4713 /* If there is a store to a global variable the rhs escapes. */
4714 if ((lhsop
= get_base_address (lhsop
)) != NULL_TREE
4716 && is_global_var (lhsop
)
4718 || DECL_EXTERNAL (lhsop
) || TREE_PUBLIC (lhsop
)))
4719 make_escape_constraint (rhsop
);
4721 /* Handle escapes through return. */
4722 else if (gimple_code (t
) == GIMPLE_RETURN
4723 && gimple_return_retval (t
) != NULL_TREE
)
/* Without usable function info, the returned value simply escapes;
   in IPA mode it flows into the function's result sub-variable.  */
4727 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4728 make_escape_constraint (gimple_return_retval (t
));
4729 else if (in_ipa_mode
4732 struct constraint_expr lhs
;
4733 struct constraint_expr
*rhsp
;
4736 lhs
= get_function_part_constraint (fi
, fi_result
);
4737 get_constraint_for_rhs (gimple_return_retval (t
), &rhsc
);
4738 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4739 process_constraint (new_constraint (lhs
, *rhsp
));
4742 /* Handle asms conservatively by adding escape constraints to everything. */
4743 else if (gimple_code (t
) == GIMPLE_ASM
)
4745 unsigned i
, noutputs
;
4746 const char **oconstraints
;
4747 const char *constraint
;
4748 bool allows_mem
, allows_reg
, is_inout
;
4750 noutputs
= gimple_asm_noutputs (t
);
4751 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4753 for (i
= 0; i
< noutputs
; ++i
)
4755 tree link
= gimple_asm_output_op (t
, i
);
4756 tree op
= TREE_VALUE (link
);
4758 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4759 oconstraints
[i
] = constraint
;
4760 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
4761 &allows_reg
, &is_inout
);
4763 /* A memory constraint makes the address of the operand escape. */
4764 if (!allows_reg
&& allows_mem
)
4765 make_escape_constraint (build_fold_addr_expr (op
));
4767 /* The asm may read global memory, so outputs may point to
4768 any global memory. */
4771 vec
<ce_s
> lhsc
= vNULL
;
4772 struct constraint_expr rhsc
, *lhsp
;
4774 get_constraint_for (op
, &lhsc
);
4775 rhsc
.var
= nonlocal_id
;
4778 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4779 process_constraint (new_constraint (*lhsp
, rhsc
));
4783 for (i
= 0; i
< gimple_asm_ninputs (t
); ++i
)
4785 tree link
= gimple_asm_input_op (t
, i
);
4786 tree op
= TREE_VALUE (link
);
4788 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4790 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
4791 &allows_mem
, &allows_reg
);
4793 /* A memory constraint makes the address of the operand escape. */
4794 if (!allows_reg
&& allows_mem
)
4795 make_escape_constraint (build_fold_addr_expr (op
));
4796 /* Strictly we'd only need the constraint to ESCAPED if
4797 the asm clobbers memory, otherwise using something
4798 along the lines of per-call clobbers/uses would be enough. */
4800 make_escape_constraint (op
);
4809 /* Create a constraint adding to the clobber set of FI the memory
4810 pointed to by PTR. */
4813 process_ipa_clobber (varinfo_t fi
, tree ptr
)
4815 vec
<ce_s
> ptrc
= vNULL
;
4816 struct constraint_expr
*c
, lhs
;
/* Everything PTR may point to is added to FI's clobber sub-variable.  */
4818 get_constraint_for_rhs (ptr
, &ptrc
);
4819 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4820 FOR_EACH_VEC_ELT (ptrc
, i
, c
)
4821 process_constraint (new_constraint (lhs
, *c
));
4825 /* Walk statement T setting up clobber and use constraints according to the
4826 references found in T. This function is a main part of the
4827 IPA constraint builder. */
4830 find_func_clobbers (gimple origt
)
4833 vec
<ce_s
> lhsc
= vNULL
;
4834 vec
<ce_s
> rhsc
= vNULL
;
4837 /* Add constraints for clobbered/used in IPA mode.
4838 We are not interested in what automatic variables are clobbered
4839 or used as we only use the information in the caller to which
4840 they do not escape. */
4841 gcc_assert (in_ipa_mode
);
4843 /* If the stmt refers to memory in any way it better had a VUSE. */
4844 if (gimple_vuse (t
) == NULL_TREE
)
4847 /* We'd better have function information for the current function. */
4848 fi
= lookup_vi_for_tree (cfun
->decl
);
4849 gcc_assert (fi
!= NULL
);
4851 /* Account for stores in assignments and calls. */
4852 if (gimple_vdef (t
) != NULL_TREE
4853 && gimple_has_lhs (t
))
4855 tree lhs
= gimple_get_lhs (t
);
4857 while (handled_component_p (tem
))
4858 tem
= TREE_OPERAND (tem
, 0);
4860 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4861 || INDIRECT_REF_P (tem
)
4862 || (TREE_CODE (tem
) == MEM_REF
4863 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4865 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4867 struct constraint_expr lhsc
, *rhsp
;
4869 lhsc
= get_function_part_constraint (fi
, fi_clobbers
);
4870 get_constraint_for_address_of (lhs
, &rhsc
);
4871 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4872 process_constraint (new_constraint (lhsc
, *rhsp
));
4877 /* Account for uses in assigments and returns. */
4878 if (gimple_assign_single_p (t
)
4879 || (gimple_code (t
) == GIMPLE_RETURN
4880 && gimple_return_retval (t
) != NULL_TREE
))
4882 tree rhs
= (gimple_assign_single_p (t
)
4883 ? gimple_assign_rhs1 (t
) : gimple_return_retval (t
));
4885 while (handled_component_p (tem
))
4886 tem
= TREE_OPERAND (tem
, 0);
4888 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4889 || INDIRECT_REF_P (tem
)
4890 || (TREE_CODE (tem
) == MEM_REF
4891 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4893 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4895 struct constraint_expr lhs
, *rhsp
;
4897 lhs
= get_function_part_constraint (fi
, fi_uses
);
4898 get_constraint_for_address_of (rhs
, &rhsc
);
4899 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4900 process_constraint (new_constraint (lhs
, *rhsp
));
4905 if (is_gimple_call (t
))
4907 varinfo_t cfi
= NULL
;
4908 tree decl
= gimple_call_fndecl (t
);
4909 struct constraint_expr lhs
, rhs
;
4912 /* For builtins we do not have separate function info. For those
4913 we do not generate escapes for we have to generate clobbers/uses. */
4914 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4915 switch (DECL_FUNCTION_CODE (decl
))
4917 /* The following functions use and clobber memory pointed to
4918 by their arguments. */
4919 case BUILT_IN_STRCPY
:
4920 case BUILT_IN_STRNCPY
:
4921 case BUILT_IN_BCOPY
:
4922 case BUILT_IN_MEMCPY
:
4923 case BUILT_IN_MEMMOVE
:
4924 case BUILT_IN_MEMPCPY
:
4925 case BUILT_IN_STPCPY
:
4926 case BUILT_IN_STPNCPY
:
4927 case BUILT_IN_STRCAT
:
4928 case BUILT_IN_STRNCAT
:
4929 case BUILT_IN_STRCPY_CHK
:
4930 case BUILT_IN_STRNCPY_CHK
:
4931 case BUILT_IN_MEMCPY_CHK
:
4932 case BUILT_IN_MEMMOVE_CHK
:
4933 case BUILT_IN_MEMPCPY_CHK
:
4934 case BUILT_IN_STPCPY_CHK
:
4935 case BUILT_IN_STPNCPY_CHK
:
4936 case BUILT_IN_STRCAT_CHK
:
4937 case BUILT_IN_STRNCAT_CHK
:
4939 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4940 == BUILT_IN_BCOPY
? 1 : 0));
4941 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4942 == BUILT_IN_BCOPY
? 0 : 1));
4944 struct constraint_expr
*rhsp
, *lhsp
;
4945 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4946 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4947 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4948 process_constraint (new_constraint (lhs
, *lhsp
));
4950 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4951 lhs
= get_function_part_constraint (fi
, fi_uses
);
4952 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4953 process_constraint (new_constraint (lhs
, *rhsp
));
4957 /* The following function clobbers memory pointed to by
4959 case BUILT_IN_MEMSET
:
4960 case BUILT_IN_MEMSET_CHK
:
4962 tree dest
= gimple_call_arg (t
, 0);
4965 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4966 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4967 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4968 process_constraint (new_constraint (lhs
, *lhsp
));
4972 /* The following functions clobber their second and third
4974 case BUILT_IN_SINCOS
:
4975 case BUILT_IN_SINCOSF
:
4976 case BUILT_IN_SINCOSL
:
4978 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4979 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4982 /* The following functions clobber their second argument. */
4983 case BUILT_IN_FREXP
:
4984 case BUILT_IN_FREXPF
:
4985 case BUILT_IN_FREXPL
:
4986 case BUILT_IN_LGAMMA_R
:
4987 case BUILT_IN_LGAMMAF_R
:
4988 case BUILT_IN_LGAMMAL_R
:
4989 case BUILT_IN_GAMMA_R
:
4990 case BUILT_IN_GAMMAF_R
:
4991 case BUILT_IN_GAMMAL_R
:
4993 case BUILT_IN_MODFF
:
4994 case BUILT_IN_MODFL
:
4996 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4999 /* The following functions clobber their third argument. */
5000 case BUILT_IN_REMQUO
:
5001 case BUILT_IN_REMQUOF
:
5002 case BUILT_IN_REMQUOL
:
5004 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
5007 /* The following functions neither read nor clobber memory. */
5008 case BUILT_IN_ASSUME_ALIGNED
:
5011 /* Trampolines are of no interest to us. */
5012 case BUILT_IN_INIT_TRAMPOLINE
:
5013 case BUILT_IN_ADJUST_TRAMPOLINE
:
5015 case BUILT_IN_VA_START
:
5016 case BUILT_IN_VA_END
:
5018 /* printf-style functions may have hooks to set pointers to
5019 point to somewhere into the generated string. Leave them
5020 for a later exercise... */
5022 /* Fallthru to general call handling. */;
5025 /* Parameters passed by value are used. */
5026 lhs
= get_function_part_constraint (fi
, fi_uses
);
5027 for (i
= 0; i
< gimple_call_num_args (t
); i
++)
5029 struct constraint_expr
*rhsp
;
5030 tree arg
= gimple_call_arg (t
, i
);
5032 if (TREE_CODE (arg
) == SSA_NAME
5033 || is_gimple_min_invariant (arg
))
5036 get_constraint_for_address_of (arg
, &rhsc
);
5037 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5038 process_constraint (new_constraint (lhs
, *rhsp
));
5042 /* Build constraints for propagating clobbers/uses along the
5044 cfi
= get_fi_for_callee (t
);
5045 if (cfi
->id
== anything_id
)
5047 if (gimple_vdef (t
))
5048 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5050 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5055 /* For callees without function info (that's external functions),
5056 ESCAPED is clobbered and used. */
5057 if (gimple_call_fndecl (t
)
5058 && !cfi
->is_fn_info
)
5062 if (gimple_vdef (t
))
5063 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5065 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
), escaped_id
);
5067 /* Also honor the call statement use/clobber info. */
5068 if ((vi
= lookup_call_clobber_vi (t
)) != NULL
)
5069 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5071 if ((vi
= lookup_call_use_vi (t
)) != NULL
)
5072 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
),
5077 /* Otherwise the caller clobbers and uses what the callee does.
5078 ??? This should use a new complex constraint that filters
5079 local variables of the callee. */
5080 if (gimple_vdef (t
))
5082 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5083 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
5084 process_constraint (new_constraint (lhs
, rhs
));
5086 lhs
= get_function_part_constraint (fi
, fi_uses
);
5087 rhs
= get_function_part_constraint (cfi
, fi_uses
);
5088 process_constraint (new_constraint (lhs
, rhs
));
5090 else if (gimple_code (t
) == GIMPLE_ASM
)
5092 /* ??? Ick. We can do better. */
5093 if (gimple_vdef (t
))
5094 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5096 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5104 /* Find the first varinfo in the same variable as START that overlaps with
5105 OFFSET. Return NULL if we can't find one. */
5108 first_vi_for_offset (varinfo_t start
, unsigned HOST_WIDE_INT offset
)
5110 /* If the offset is outside of the variable, bail out. */
5111 if (offset
>= start
->fullsize
)
5114 /* If we cannot reach offset from start, lookup the first field
5115 and start from there. */
5116 if (start
->offset
> offset
)
5117 start
= get_varinfo (start
->head
);
5121 /* We may not find a variable in the field list with the actual
5122 offset when when we have glommed a structure to a variable.
5123 In that case, however, offset should still be within the size
5125 if (offset
>= start
->offset
5126 && (offset
- start
->offset
) < start
->size
)
5129 start
= vi_next (start
);
5135 /* Find the first varinfo in the same variable as START that overlaps with
5136 OFFSET. If there is no such varinfo the varinfo directly preceding
5137 OFFSET is returned. */
5140 first_or_preceding_vi_for_offset (varinfo_t start
,
5141 unsigned HOST_WIDE_INT offset
)
5143 /* If we cannot reach offset from start, lookup the first field
5144 and start from there. */
5145 if (start
->offset
> offset
)
5146 start
= get_varinfo (start
->head
);
5148 /* We may not find a variable in the field list with the actual
5149 offset when when we have glommed a structure to a variable.
5150 In that case, however, offset should still be within the size
5152 If we got beyond the offset we look for return the field
5153 directly preceding offset which may be the last field. */
5155 && offset
>= start
->offset
5156 && !((offset
- start
->offset
) < start
->size
))
5157 start
= vi_next (start
);
5163 /* This structure is used during pushing fields onto the fieldstack
5164 to track the offset of the field, since bitpos_of_field gives it
5165 relative to its immediate containing type, and we want it relative
5166 to the ultimate containing object. */
5170 /* Offset from the base of the base containing object to this field. */
5171 HOST_WIDE_INT offset
;
5173 /* Size, in bits, of the field. */
5174 unsigned HOST_WIDE_INT size
;
5176 unsigned has_unknown_size
: 1;
5178 unsigned must_have_pointers
: 1;
5180 unsigned may_have_pointers
: 1;
5182 unsigned only_restrict_pointers
: 1;
5184 typedef struct fieldoff fieldoff_s
;
5187 /* qsort comparison function for two fieldoff's PA and PB */
5190 fieldoff_compare (const void *pa
, const void *pb
)
5192 const fieldoff_s
*foa
= (const fieldoff_s
*)pa
;
5193 const fieldoff_s
*fob
= (const fieldoff_s
*)pb
;
5194 unsigned HOST_WIDE_INT foasize
, fobsize
;
5196 if (foa
->offset
< fob
->offset
)
5198 else if (foa
->offset
> fob
->offset
)
5201 foasize
= foa
->size
;
5202 fobsize
= fob
->size
;
5203 if (foasize
< fobsize
)
5205 else if (foasize
> fobsize
)
5210 /* Sort a fieldstack according to the field offset and sizes. */
5212 sort_fieldstack (vec
<fieldoff_s
> fieldstack
)
5214 fieldstack
.qsort (fieldoff_compare
);
5217 /* Return true if T is a type that can have subvars. */
5220 type_can_have_subvars (const_tree t
)
5222 /* Aggregates without overlapping fields can have subvars. */
5223 return TREE_CODE (t
) == RECORD_TYPE
;
5226 /* Return true if V is a tree that we can have subvars for.
5227 Normally, this is any aggregate type. Also complex
5228 types which are not gimple registers can have subvars. */
5231 var_can_have_subvars (const_tree v
)
5233 /* Volatile variables should never have subvars. */
5234 if (TREE_THIS_VOLATILE (v
))
5237 /* Non decls or memory tags can never have subvars. */
5241 return type_can_have_subvars (TREE_TYPE (v
));
5244 /* Return true if T is a type that does contain pointers. */
5247 type_must_have_pointers (tree type
)
5249 if (POINTER_TYPE_P (type
))
5252 if (TREE_CODE (type
) == ARRAY_TYPE
)
5253 return type_must_have_pointers (TREE_TYPE (type
));
5255 /* A function or method can have pointers as arguments, so track
5256 those separately. */
5257 if (TREE_CODE (type
) == FUNCTION_TYPE
5258 || TREE_CODE (type
) == METHOD_TYPE
)
5265 field_must_have_pointers (tree t
)
5267 return type_must_have_pointers (TREE_TYPE (t
));
5270 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5271 the fields of TYPE onto fieldstack, recording their offsets along
5274 OFFSET is used to keep track of the offset in this entire
5275 structure, rather than just the immediately containing structure.
5276 Returns false if the caller is supposed to handle the field we
5280 push_fields_onto_fieldstack (tree type
, vec
<fieldoff_s
> *fieldstack
,
5281 HOST_WIDE_INT offset
)
5284 bool empty_p
= true;
5286 if (TREE_CODE (type
) != RECORD_TYPE
)
5289 /* If the vector of fields is growing too big, bail out early.
5290 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5292 if (fieldstack
->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5295 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
5296 if (TREE_CODE (field
) == FIELD_DECL
)
5299 HOST_WIDE_INT foff
= bitpos_of_field (field
);
5301 if (!var_can_have_subvars (field
)
5302 || TREE_CODE (TREE_TYPE (field
)) == QUAL_UNION_TYPE
5303 || TREE_CODE (TREE_TYPE (field
)) == UNION_TYPE
)
5305 else if (!push_fields_onto_fieldstack
5306 (TREE_TYPE (field
), fieldstack
, offset
+ foff
)
5307 && (DECL_SIZE (field
)
5308 && !integer_zerop (DECL_SIZE (field
))))
5309 /* Empty structures may have actual size, like in C++. So
5310 see if we didn't push any subfields and the size is
5311 nonzero, push the field onto the stack. */
5316 fieldoff_s
*pair
= NULL
;
5317 bool has_unknown_size
= false;
5318 bool must_have_pointers_p
;
5320 if (!fieldstack
->is_empty ())
5321 pair
= &fieldstack
->last ();
5323 /* If there isn't anything at offset zero, create sth. */
5325 && offset
+ foff
!= 0)
5327 fieldoff_s e
= {0, offset
+ foff
, false, false, false, false};
5328 pair
= fieldstack
->safe_push (e
);
5331 if (!DECL_SIZE (field
)
5332 || !tree_fits_uhwi_p (DECL_SIZE (field
)))
5333 has_unknown_size
= true;
5335 /* If adjacent fields do not contain pointers merge them. */
5336 must_have_pointers_p
= field_must_have_pointers (field
);
5338 && !has_unknown_size
5339 && !must_have_pointers_p
5340 && !pair
->must_have_pointers
5341 && !pair
->has_unknown_size
5342 && pair
->offset
+ (HOST_WIDE_INT
)pair
->size
== offset
+ foff
)
5344 pair
->size
+= tree_to_hwi (DECL_SIZE (field
));
5349 e
.offset
= offset
+ foff
;
5350 e
.has_unknown_size
= has_unknown_size
;
5351 if (!has_unknown_size
)
5352 e
.size
= tree_to_hwi (DECL_SIZE (field
));
5355 e
.must_have_pointers
= must_have_pointers_p
;
5356 e
.may_have_pointers
= true;
5357 e
.only_restrict_pointers
5358 = (!has_unknown_size
5359 && POINTER_TYPE_P (TREE_TYPE (field
))
5360 && TYPE_RESTRICT (TREE_TYPE (field
)));
5361 fieldstack
->safe_push (e
);
5371 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5372 if it is a varargs function. */
5375 count_num_arguments (tree decl
, bool *is_varargs
)
5377 unsigned int num
= 0;
5380 /* Capture named arguments for K&R functions. They do not
5381 have a prototype and thus no TYPE_ARG_TYPES. */
5382 for (t
= DECL_ARGUMENTS (decl
); t
; t
= DECL_CHAIN (t
))
5385 /* Check if the function has variadic arguments. */
5386 for (t
= TYPE_ARG_TYPES (TREE_TYPE (decl
)); t
; t
= TREE_CHAIN (t
))
5387 if (TREE_VALUE (t
) == void_type_node
)
5395 /* Creation function node for DECL, using NAME, and return the index
5396 of the variable we've created for the function. */
5399 create_function_info_for (tree decl
, const char *name
)
5401 struct function
*fn
= DECL_STRUCT_FUNCTION (decl
);
5402 varinfo_t vi
, prev_vi
;
5405 bool is_varargs
= false;
5406 unsigned int num_args
= count_num_arguments (decl
, &is_varargs
);
5408 /* Create the variable info. */
5410 vi
= new_var_info (decl
, name
);
5413 vi
->fullsize
= fi_parm_base
+ num_args
;
5415 vi
->may_have_pointers
= false;
5418 insert_vi_for_tree (vi
->decl
, vi
);
5422 /* Create a variable for things the function clobbers and one for
5423 things the function uses. */
5425 varinfo_t clobbervi
, usevi
;
5426 const char *newname
;
5429 asprintf (&tempname
, "%s.clobber", name
);
5430 newname
= ggc_strdup (tempname
);
5433 clobbervi
= new_var_info (NULL
, newname
);
5434 clobbervi
->offset
= fi_clobbers
;
5435 clobbervi
->size
= 1;
5436 clobbervi
->fullsize
= vi
->fullsize
;
5437 clobbervi
->is_full_var
= true;
5438 clobbervi
->is_global_var
= false;
5439 gcc_assert (prev_vi
->offset
< clobbervi
->offset
);
5440 prev_vi
->next
= clobbervi
->id
;
5441 prev_vi
= clobbervi
;
5443 asprintf (&tempname
, "%s.use", name
);
5444 newname
= ggc_strdup (tempname
);
5447 usevi
= new_var_info (NULL
, newname
);
5448 usevi
->offset
= fi_uses
;
5450 usevi
->fullsize
= vi
->fullsize
;
5451 usevi
->is_full_var
= true;
5452 usevi
->is_global_var
= false;
5453 gcc_assert (prev_vi
->offset
< usevi
->offset
);
5454 prev_vi
->next
= usevi
->id
;
5458 /* And one for the static chain. */
5459 if (fn
->static_chain_decl
!= NULL_TREE
)
5462 const char *newname
;
5465 asprintf (&tempname
, "%s.chain", name
);
5466 newname
= ggc_strdup (tempname
);
5469 chainvi
= new_var_info (fn
->static_chain_decl
, newname
);
5470 chainvi
->offset
= fi_static_chain
;
5472 chainvi
->fullsize
= vi
->fullsize
;
5473 chainvi
->is_full_var
= true;
5474 chainvi
->is_global_var
= false;
5475 gcc_assert (prev_vi
->offset
< chainvi
->offset
);
5476 prev_vi
->next
= chainvi
->id
;
5478 insert_vi_for_tree (fn
->static_chain_decl
, chainvi
);
5481 /* Create a variable for the return var. */
5482 if (DECL_RESULT (decl
) != NULL
5483 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
5486 const char *newname
;
5488 tree resultdecl
= decl
;
5490 if (DECL_RESULT (decl
))
5491 resultdecl
= DECL_RESULT (decl
);
5493 asprintf (&tempname
, "%s.result", name
);
5494 newname
= ggc_strdup (tempname
);
5497 resultvi
= new_var_info (resultdecl
, newname
);
5498 resultvi
->offset
= fi_result
;
5500 resultvi
->fullsize
= vi
->fullsize
;
5501 resultvi
->is_full_var
= true;
5502 if (DECL_RESULT (decl
))
5503 resultvi
->may_have_pointers
= true;
5504 gcc_assert (prev_vi
->offset
< resultvi
->offset
);
5505 prev_vi
->next
= resultvi
->id
;
5507 if (DECL_RESULT (decl
))
5508 insert_vi_for_tree (DECL_RESULT (decl
), resultvi
);
5511 /* Set up variables for each argument. */
5512 arg
= DECL_ARGUMENTS (decl
);
5513 for (i
= 0; i
< num_args
; i
++)
5516 const char *newname
;
5518 tree argdecl
= decl
;
5523 asprintf (&tempname
, "%s.arg%d", name
, i
);
5524 newname
= ggc_strdup (tempname
);
5527 argvi
= new_var_info (argdecl
, newname
);
5528 argvi
->offset
= fi_parm_base
+ i
;
5530 argvi
->is_full_var
= true;
5531 argvi
->fullsize
= vi
->fullsize
;
5533 argvi
->may_have_pointers
= true;
5534 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5535 prev_vi
->next
= argvi
->id
;
5539 insert_vi_for_tree (arg
, argvi
);
5540 arg
= DECL_CHAIN (arg
);
5544 /* Add one representative for all further args. */
5548 const char *newname
;
5552 asprintf (&tempname
, "%s.varargs", name
);
5553 newname
= ggc_strdup (tempname
);
5556 /* We need sth that can be pointed to for va_start. */
5557 decl
= build_fake_var_decl (ptr_type_node
);
5559 argvi
= new_var_info (decl
, newname
);
5560 argvi
->offset
= fi_parm_base
+ num_args
;
5562 argvi
->is_full_var
= true;
5563 argvi
->is_heap_var
= true;
5564 argvi
->fullsize
= vi
->fullsize
;
5565 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5566 prev_vi
->next
= argvi
->id
;
5574 /* Return true if FIELDSTACK contains fields that overlap.
5575 FIELDSTACK is assumed to be sorted by offset. */
5578 check_for_overlaps (vec
<fieldoff_s
> fieldstack
)
5580 fieldoff_s
*fo
= NULL
;
5582 HOST_WIDE_INT lastoffset
= -1;
5584 FOR_EACH_VEC_ELT (fieldstack
, i
, fo
)
5586 if (fo
->offset
== lastoffset
)
5588 lastoffset
= fo
->offset
;
5593 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5594 This will also create any varinfo structures necessary for fields
5598 create_variable_info_for_1 (tree decl
, const char *name
)
5600 varinfo_t vi
, newvi
;
5601 tree decl_type
= TREE_TYPE (decl
);
5602 tree declsize
= DECL_P (decl
) ? DECL_SIZE (decl
) : TYPE_SIZE (decl_type
);
5603 vec
<fieldoff_s
> fieldstack
= vNULL
;
5608 || !tree_fits_uhwi_p (declsize
))
5610 vi
= new_var_info (decl
, name
);
5614 vi
->is_unknown_size_var
= true;
5615 vi
->is_full_var
= true;
5616 vi
->may_have_pointers
= true;
5620 /* Collect field information. */
5621 if (use_field_sensitive
5622 && var_can_have_subvars (decl
)
5623 /* ??? Force us to not use subfields for global initializers
5624 in IPA mode. Else we'd have to parse arbitrary initializers. */
5626 && is_global_var (decl
)
5627 && DECL_INITIAL (decl
)))
5629 fieldoff_s
*fo
= NULL
;
5630 bool notokay
= false;
5633 push_fields_onto_fieldstack (decl_type
, &fieldstack
, 0);
5635 for (i
= 0; !notokay
&& fieldstack
.iterate (i
, &fo
); i
++)
5636 if (fo
->has_unknown_size
5643 /* We can't sort them if we have a field with a variable sized type,
5644 which will make notokay = true. In that case, we are going to return
5645 without creating varinfos for the fields anyway, so sorting them is a
5649 sort_fieldstack (fieldstack
);
5650 /* Due to some C++ FE issues, like PR 22488, we might end up
5651 what appear to be overlapping fields even though they,
5652 in reality, do not overlap. Until the C++ FE is fixed,
5653 we will simply disable field-sensitivity for these cases. */
5654 notokay
= check_for_overlaps (fieldstack
);
5658 fieldstack
.release ();
5661 /* If we didn't end up collecting sub-variables create a full
5662 variable for the decl. */
5663 if (fieldstack
.length () <= 1
5664 || fieldstack
.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5666 vi
= new_var_info (decl
, name
);
5668 vi
->may_have_pointers
= true;
5669 vi
->fullsize
= tree_to_hwi (declsize
);
5670 vi
->size
= vi
->fullsize
;
5671 vi
->is_full_var
= true;
5672 fieldstack
.release ();
5676 vi
= new_var_info (decl
, name
);
5677 vi
->fullsize
= tree_to_hwi (declsize
);
5678 for (i
= 0, newvi
= vi
;
5679 fieldstack
.iterate (i
, &fo
);
5680 ++i
, newvi
= vi_next (newvi
))
5682 const char *newname
= "NULL";
5687 asprintf (&tempname
, "%s." HOST_WIDE_INT_PRINT_DEC
5688 "+" HOST_WIDE_INT_PRINT_DEC
, name
, fo
->offset
, fo
->size
);
5689 newname
= ggc_strdup (tempname
);
5692 newvi
->name
= newname
;
5693 newvi
->offset
= fo
->offset
;
5694 newvi
->size
= fo
->size
;
5695 newvi
->fullsize
= vi
->fullsize
;
5696 newvi
->may_have_pointers
= fo
->may_have_pointers
;
5697 newvi
->only_restrict_pointers
= fo
->only_restrict_pointers
;
5698 if (i
+ 1 < fieldstack
.length ())
5700 varinfo_t tem
= new_var_info (decl
, name
);
5701 newvi
->next
= tem
->id
;
5706 fieldstack
.release ();
5712 create_variable_info_for (tree decl
, const char *name
)
5714 varinfo_t vi
= create_variable_info_for_1 (decl
, name
);
5715 unsigned int id
= vi
->id
;
5717 insert_vi_for_tree (decl
, vi
);
5719 if (TREE_CODE (decl
) != VAR_DECL
)
5722 /* Create initial constraints for globals. */
5723 for (; vi
; vi
= vi_next (vi
))
5725 if (!vi
->may_have_pointers
5726 || !vi
->is_global_var
)
5729 /* Mark global restrict qualified pointers. */
5730 if ((POINTER_TYPE_P (TREE_TYPE (decl
))
5731 && TYPE_RESTRICT (TREE_TYPE (decl
)))
5732 || vi
->only_restrict_pointers
)
5734 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5738 /* In non-IPA mode the initializer from nonlocal is all we need. */
5740 || DECL_HARD_REGISTER (decl
))
5741 make_copy_constraint (vi
, nonlocal_id
);
5743 /* In IPA mode parse the initializer and generate proper constraints
5747 struct varpool_node
*vnode
= varpool_get_node (decl
);
5749 /* For escaped variables initialize them from nonlocal. */
5750 if (!varpool_all_refs_explicit_p (vnode
))
5751 make_copy_constraint (vi
, nonlocal_id
);
5753 /* If this is a global variable with an initializer and we are in
5754 IPA mode generate constraints for it. */
5755 if (DECL_INITIAL (decl
)
5756 && vnode
->definition
)
5758 vec
<ce_s
> rhsc
= vNULL
;
5759 struct constraint_expr lhs
, *rhsp
;
5761 get_constraint_for_rhs (DECL_INITIAL (decl
), &rhsc
);
5765 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5766 process_constraint (new_constraint (lhs
, *rhsp
));
5767 /* If this is a variable that escapes from the unit
5768 the initializer escapes as well. */
5769 if (!varpool_all_refs_explicit_p (vnode
))
5771 lhs
.var
= escaped_id
;
5774 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5775 process_constraint (new_constraint (lhs
, *rhsp
));
5785 /* Print out the points-to solution for VAR to FILE. */
5788 dump_solution_for_var (FILE *file
, unsigned int var
)
5790 varinfo_t vi
= get_varinfo (var
);
5794 /* Dump the solution for unified vars anyway, this avoids difficulties
5795 in scanning dumps in the testsuite. */
5796 fprintf (file
, "%s = { ", vi
->name
);
5797 vi
= get_varinfo (find (var
));
5798 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
5799 fprintf (file
, "%s ", get_varinfo (i
)->name
);
5800 fprintf (file
, "}");
5802 /* But note when the variable was unified. */
5804 fprintf (file
, " same as %s", vi
->name
);
5806 fprintf (file
, "\n");
5809 /* Print the points-to solution for VAR to stdout. */
5812 debug_solution_for_var (unsigned int var
)
5814 dump_solution_for_var (stdout
, var
);
5817 /* Create varinfo structures for all of the variables in the
5818 function for intraprocedural mode. */
5821 intra_create_variable_infos (void)
5825 /* For each incoming pointer argument arg, create the constraint ARG
5826 = NONLOCAL or a dummy variable if it is a restrict qualified
5827 passed-by-reference argument. */
5828 for (t
= DECL_ARGUMENTS (current_function_decl
); t
; t
= DECL_CHAIN (t
))
5830 varinfo_t p
= get_vi_for_tree (t
);
5832 /* For restrict qualified pointers to objects passed by
5833 reference build a real representative for the pointed-to object.
5834 Treat restrict qualified references the same. */
5835 if (TYPE_RESTRICT (TREE_TYPE (t
))
5836 && ((DECL_BY_REFERENCE (t
) && POINTER_TYPE_P (TREE_TYPE (t
)))
5837 || TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
)
5838 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t
))))
5840 struct constraint_expr lhsc
, rhsc
;
5842 tree heapvar
= build_fake_var_decl (TREE_TYPE (TREE_TYPE (t
)));
5843 DECL_EXTERNAL (heapvar
) = 1;
5844 vi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS");
5845 insert_vi_for_tree (heapvar
, vi
);
5850 rhsc
.type
= ADDRESSOF
;
5852 process_constraint (new_constraint (lhsc
, rhsc
));
5853 for (; vi
; vi
= vi_next (vi
))
5854 if (vi
->may_have_pointers
)
5856 if (vi
->only_restrict_pointers
)
5857 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5859 make_copy_constraint (vi
, nonlocal_id
);
5864 if (POINTER_TYPE_P (TREE_TYPE (t
))
5865 && TYPE_RESTRICT (TREE_TYPE (t
)))
5866 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5869 for (; p
; p
= vi_next (p
))
5871 if (p
->only_restrict_pointers
)
5872 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5873 else if (p
->may_have_pointers
)
5874 make_constraint_from (p
, nonlocal_id
);
5879 /* Add a constraint for a result decl that is passed by reference. */
5880 if (DECL_RESULT (cfun
->decl
)
5881 && DECL_BY_REFERENCE (DECL_RESULT (cfun
->decl
)))
5883 varinfo_t p
, result_vi
= get_vi_for_tree (DECL_RESULT (cfun
->decl
));
5885 for (p
= result_vi
; p
; p
= vi_next (p
))
5886 make_constraint_from (p
, nonlocal_id
);
5889 /* Add a constraint for the incoming static chain parameter. */
5890 if (cfun
->static_chain_decl
!= NULL_TREE
)
5892 varinfo_t p
, chain_vi
= get_vi_for_tree (cfun
->static_chain_decl
);
5894 for (p
= chain_vi
; p
; p
= vi_next (p
))
5895 make_constraint_from (p
, nonlocal_id
);
5899 /* Structure used to put solution bitmaps in a hashtable so they can
5900 be shared among variables with the same points-to set. */
5902 typedef struct shared_bitmap_info
5906 } *shared_bitmap_info_t
;
5907 typedef const struct shared_bitmap_info
*const_shared_bitmap_info_t
;
5909 /* Shared_bitmap hashtable helpers. */
5911 struct shared_bitmap_hasher
: typed_free_remove
<shared_bitmap_info
>
5913 typedef shared_bitmap_info value_type
;
5914 typedef shared_bitmap_info compare_type
;
5915 static inline hashval_t
hash (const value_type
*);
5916 static inline bool equal (const value_type
*, const compare_type
*);
5919 /* Hash function for a shared_bitmap_info_t */
5922 shared_bitmap_hasher::hash (const value_type
*bi
)
5924 return bi
->hashcode
;
5927 /* Equality function for two shared_bitmap_info_t's. */
5930 shared_bitmap_hasher::equal (const value_type
*sbi1
, const compare_type
*sbi2
)
5932 return bitmap_equal_p (sbi1
->pt_vars
, sbi2
->pt_vars
);
5935 /* Shared_bitmap hashtable. */
5937 static hash_table
<shared_bitmap_hasher
> shared_bitmap_table
;
5939 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5940 existing instance if there is one, NULL otherwise. */
5943 shared_bitmap_lookup (bitmap pt_vars
)
5945 shared_bitmap_info
**slot
;
5946 struct shared_bitmap_info sbi
;
5948 sbi
.pt_vars
= pt_vars
;
5949 sbi
.hashcode
= bitmap_hash (pt_vars
);
5951 slot
= shared_bitmap_table
.find_slot_with_hash (&sbi
, sbi
.hashcode
,
5956 return (*slot
)->pt_vars
;
5960 /* Add a bitmap to the shared bitmap hashtable. */
5963 shared_bitmap_add (bitmap pt_vars
)
5965 shared_bitmap_info
**slot
;
5966 shared_bitmap_info_t sbi
= XNEW (struct shared_bitmap_info
);
5968 sbi
->pt_vars
= pt_vars
;
5969 sbi
->hashcode
= bitmap_hash (pt_vars
);
5971 slot
= shared_bitmap_table
.find_slot_with_hash (sbi
, sbi
->hashcode
, INSERT
);
5972 gcc_assert (!*slot
);
5977 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5980 set_uids_in_ptset (bitmap into
, bitmap from
, struct pt_solution
*pt
)
5985 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
5987 varinfo_t vi
= get_varinfo (i
);
5989 /* The only artificial variables that are allowed in a may-alias
5990 set are heap variables. */
5991 if (vi
->is_artificial_var
&& !vi
->is_heap_var
)
5994 if (TREE_CODE (vi
->decl
) == VAR_DECL
5995 || TREE_CODE (vi
->decl
) == PARM_DECL
5996 || TREE_CODE (vi
->decl
) == RESULT_DECL
)
5998 /* If we are in IPA mode we will not recompute points-to
5999 sets after inlining so make sure they stay valid. */
6001 && !DECL_PT_UID_SET_P (vi
->decl
))
6002 SET_DECL_PT_UID (vi
->decl
, DECL_UID (vi
->decl
));
6004 /* Add the decl to the points-to set. Note that the points-to
6005 set contains global variables. */
6006 bitmap_set_bit (into
, DECL_PT_UID (vi
->decl
));
6007 if (vi
->is_global_var
)
6008 pt
->vars_contains_global
= true;
6014 /* Compute the points-to solution *PT for the variable VI. */
6016 static struct pt_solution
6017 find_what_var_points_to (varinfo_t orig_vi
)
6021 bitmap finished_solution
;
6025 struct pt_solution
*pt
;
6027 /* This variable may have been collapsed, let's get the real
6029 vi
= get_varinfo (find (orig_vi
->id
));
6031 /* See if we have already computed the solution and return it. */
6032 slot
= pointer_map_insert (final_solutions
, vi
);
6034 return *(struct pt_solution
*)*slot
;
6036 *slot
= pt
= XOBNEW (&final_solutions_obstack
, struct pt_solution
);
6037 memset (pt
, 0, sizeof (struct pt_solution
));
6039 /* Translate artificial variables into SSA_NAME_PTR_INFO
6041 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
6043 varinfo_t vi
= get_varinfo (i
);
6045 if (vi
->is_artificial_var
)
6047 if (vi
->id
== nothing_id
)
6049 else if (vi
->id
== escaped_id
)
6052 pt
->ipa_escaped
= 1;
6056 else if (vi
->id
== nonlocal_id
)
6058 else if (vi
->is_heap_var
)
6059 /* We represent heapvars in the points-to set properly. */
6061 else if (vi
->id
== readonly_id
)
6064 else if (vi
->id
== anything_id
6065 || vi
->id
== integer_id
)
6070 /* Instead of doing extra work, simply do not create
6071 elaborate points-to information for pt_anything pointers. */
6075 /* Share the final set of variables when possible. */
6076 finished_solution
= BITMAP_GGC_ALLOC ();
6077 stats
.points_to_sets_created
++;
6079 set_uids_in_ptset (finished_solution
, vi
->solution
, pt
);
6080 result
= shared_bitmap_lookup (finished_solution
);
6083 shared_bitmap_add (finished_solution
);
6084 pt
->vars
= finished_solution
;
6089 bitmap_clear (finished_solution
);
6095 /* Given a pointer variable P, fill in its points-to set. */
6098 find_what_p_points_to (tree p
)
6100 struct ptr_info_def
*pi
;
6104 /* For parameters, get at the points-to set for the actual parm
6106 if (TREE_CODE (p
) == SSA_NAME
6107 && SSA_NAME_IS_DEFAULT_DEF (p
)
6108 && (TREE_CODE (SSA_NAME_VAR (p
)) == PARM_DECL
6109 || TREE_CODE (SSA_NAME_VAR (p
)) == RESULT_DECL
))
6110 lookup_p
= SSA_NAME_VAR (p
);
6112 vi
= lookup_vi_for_tree (lookup_p
);
6116 pi
= get_ptr_info (p
);
6117 pi
->pt
= find_what_var_points_to (vi
);
6121 /* Query statistics for points-to solutions. */
6124 unsigned HOST_WIDE_INT pt_solution_includes_may_alias
;
6125 unsigned HOST_WIDE_INT pt_solution_includes_no_alias
;
6126 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias
;
6127 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias
;
6131 dump_pta_stats (FILE *s
)
6133 fprintf (s
, "\nPTA query stats:\n");
6134 fprintf (s
, " pt_solution_includes: "
6135 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6136 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6137 pta_stats
.pt_solution_includes_no_alias
,
6138 pta_stats
.pt_solution_includes_no_alias
6139 + pta_stats
.pt_solution_includes_may_alias
);
6140 fprintf (s
, " pt_solutions_intersect: "
6141 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6142 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6143 pta_stats
.pt_solutions_intersect_no_alias
,
6144 pta_stats
.pt_solutions_intersect_no_alias
6145 + pta_stats
.pt_solutions_intersect_may_alias
);
6149 /* Reset the points-to solution *PT to a conservative default
6150 (point to anything). */
6153 pt_solution_reset (struct pt_solution
*pt
)
6155 memset (pt
, 0, sizeof (struct pt_solution
));
6156 pt
->anything
= true;
6159 /* Set the points-to solution *PT to point only to the variables
6160 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6161 global variables and VARS_CONTAINS_RESTRICT specifies whether
6162 it contains restrict tag variables. */
6165 pt_solution_set (struct pt_solution
*pt
, bitmap vars
, bool vars_contains_global
)
6167 memset (pt
, 0, sizeof (struct pt_solution
));
6169 pt
->vars_contains_global
= vars_contains_global
;
6172 /* Set the points-to solution *PT to point only to the variable VAR. */
6175 pt_solution_set_var (struct pt_solution
*pt
, tree var
)
6177 memset (pt
, 0, sizeof (struct pt_solution
));
6178 pt
->vars
= BITMAP_GGC_ALLOC ();
6179 bitmap_set_bit (pt
->vars
, DECL_PT_UID (var
));
6180 pt
->vars_contains_global
= is_global_var (var
);
6183 /* Computes the union of the points-to solutions *DEST and *SRC and
6184 stores the result in *DEST. This changes the points-to bitmap
6185 of *DEST and thus may not be used if that might be shared.
6186 The points-to bitmap of *SRC and *DEST will not be shared after
6187 this function if they were not before. */
6190 pt_solution_ior_into (struct pt_solution
*dest
, struct pt_solution
*src
)
6192 dest
->anything
|= src
->anything
;
6195 pt_solution_reset (dest
);
6199 dest
->nonlocal
|= src
->nonlocal
;
6200 dest
->escaped
|= src
->escaped
;
6201 dest
->ipa_escaped
|= src
->ipa_escaped
;
6202 dest
->null
|= src
->null
;
6203 dest
->vars_contains_global
|= src
->vars_contains_global
;
6208 dest
->vars
= BITMAP_GGC_ALLOC ();
6209 bitmap_ior_into (dest
->vars
, src
->vars
);
6212 /* Return true if the points-to solution *PT is empty. */
6215 pt_solution_empty_p (struct pt_solution
*pt
)
6222 && !bitmap_empty_p (pt
->vars
))
6225 /* If the solution includes ESCAPED, check if that is empty. */
6227 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6230 /* If the solution includes ESCAPED, check if that is empty. */
6232 && !pt_solution_empty_p (&ipa_escaped_pt
))
6238 /* Return true if the points-to solution *PT only point to a single var, and
6239 return the var uid in *UID. */
6242 pt_solution_singleton_p (struct pt_solution
*pt
, unsigned *uid
)
6244 if (pt
->anything
|| pt
->nonlocal
|| pt
->escaped
|| pt
->ipa_escaped
6245 || pt
->null
|| pt
->vars
== NULL
6246 || !bitmap_single_bit_set_p (pt
->vars
))
6249 *uid
= bitmap_first_set_bit (pt
->vars
);
6253 /* Return true if the points-to solution *PT includes global memory. */
6256 pt_solution_includes_global (struct pt_solution
*pt
)
6260 || pt
->vars_contains_global
)
6264 return pt_solution_includes_global (&cfun
->gimple_df
->escaped
);
6266 if (pt
->ipa_escaped
)
6267 return pt_solution_includes_global (&ipa_escaped_pt
);
6269 /* ??? This predicate is not correct for the IPA-PTA solution
6270 as we do not properly distinguish between unit escape points
6271 and global variables. */
6272 if (cfun
->gimple_df
->ipa_pta
)
6278 /* Return true if the points-to solution *PT includes the variable
6279 declaration DECL. */
6282 pt_solution_includes_1 (struct pt_solution
*pt
, const_tree decl
)
6288 && is_global_var (decl
))
6292 && bitmap_bit_p (pt
->vars
, DECL_PT_UID (decl
)))
6295 /* If the solution includes ESCAPED, check it. */
6297 && pt_solution_includes_1 (&cfun
->gimple_df
->escaped
, decl
))
6300 /* If the solution includes ESCAPED, check it. */
6302 && pt_solution_includes_1 (&ipa_escaped_pt
, decl
))
6309 pt_solution_includes (struct pt_solution
*pt
, const_tree decl
)
6311 bool res
= pt_solution_includes_1 (pt
, decl
);
6313 ++pta_stats
.pt_solution_includes_may_alias
;
6315 ++pta_stats
.pt_solution_includes_no_alias
;
6319 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6323 pt_solutions_intersect_1 (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6325 if (pt1
->anything
|| pt2
->anything
)
6328 /* If either points to unknown global memory and the other points to
6329 any global memory they alias. */
6332 || pt2
->vars_contains_global
))
6334 && pt1
->vars_contains_global
))
6337 /* Check the escaped solution if required. */
6338 if ((pt1
->escaped
|| pt2
->escaped
)
6339 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6341 /* If both point to escaped memory and that solution
6342 is not empty they alias. */
6343 if (pt1
->escaped
&& pt2
->escaped
)
6346 /* If either points to escaped memory see if the escaped solution
6347 intersects with the other. */
6349 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt2
))
6351 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt1
)))
6355 /* Check the escaped solution if required.
6356 ??? Do we need to check the local against the IPA escaped sets? */
6357 if ((pt1
->ipa_escaped
|| pt2
->ipa_escaped
)
6358 && !pt_solution_empty_p (&ipa_escaped_pt
))
6360 /* If both point to escaped memory and that solution
6361 is not empty they alias. */
6362 if (pt1
->ipa_escaped
&& pt2
->ipa_escaped
)
6365 /* If either points to escaped memory see if the escaped solution
6366 intersects with the other. */
6367 if ((pt1
->ipa_escaped
6368 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt2
))
6369 || (pt2
->ipa_escaped
6370 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt1
)))
6374 /* Now both pointers alias if their points-to solution intersects. */
6377 && bitmap_intersect_p (pt1
->vars
, pt2
->vars
));
6381 pt_solutions_intersect (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6383 bool res
= pt_solutions_intersect_1 (pt1
, pt2
);
6385 ++pta_stats
.pt_solutions_intersect_may_alias
;
6387 ++pta_stats
.pt_solutions_intersect_no_alias
;
6392 /* Dump points-to information to OUTFILE. */
6395 dump_sa_points_to_info (FILE *outfile
)
6399 fprintf (outfile
, "\nPoints-to sets\n\n");
6401 if (dump_flags
& TDF_STATS
)
6403 fprintf (outfile
, "Stats:\n");
6404 fprintf (outfile
, "Total vars: %d\n", stats
.total_vars
);
6405 fprintf (outfile
, "Non-pointer vars: %d\n",
6406 stats
.nonpointer_vars
);
6407 fprintf (outfile
, "Statically unified vars: %d\n",
6408 stats
.unified_vars_static
);
6409 fprintf (outfile
, "Dynamically unified vars: %d\n",
6410 stats
.unified_vars_dynamic
);
6411 fprintf (outfile
, "Iterations: %d\n", stats
.iterations
);
6412 fprintf (outfile
, "Number of edges: %d\n", stats
.num_edges
);
6413 fprintf (outfile
, "Number of implicit edges: %d\n",
6414 stats
.num_implicit_edges
);
6417 for (i
= 1; i
< varmap
.length (); i
++)
6419 varinfo_t vi
= get_varinfo (i
);
6420 if (!vi
->may_have_pointers
)
6422 dump_solution_for_var (outfile
, i
);
6427 /* Debug points-to information to stderr. */
6430 debug_sa_points_to_info (void)
6432 dump_sa_points_to_info (stderr
);
6436 /* Initialize the always-existing constraint variables for NULL
6437 ANYTHING, READONLY, and INTEGER */
6440 init_base_vars (void)
6442 struct constraint_expr lhs
, rhs
;
6443 varinfo_t var_anything
;
6444 varinfo_t var_nothing
;
6445 varinfo_t var_readonly
;
6446 varinfo_t var_escaped
;
6447 varinfo_t var_nonlocal
;
6448 varinfo_t var_storedanything
;
6449 varinfo_t var_integer
;
6451 /* Variable ID zero is reserved and should be NULL. */
6452 varmap
.safe_push (NULL
);
6454 /* Create the NULL variable, used to represent that a variable points
6456 var_nothing
= new_var_info (NULL_TREE
, "NULL");
6457 gcc_assert (var_nothing
->id
== nothing_id
);
6458 var_nothing
->is_artificial_var
= 1;
6459 var_nothing
->offset
= 0;
6460 var_nothing
->size
= ~0;
6461 var_nothing
->fullsize
= ~0;
6462 var_nothing
->is_special_var
= 1;
6463 var_nothing
->may_have_pointers
= 0;
6464 var_nothing
->is_global_var
= 0;
6466 /* Create the ANYTHING variable, used to represent that a variable
6467 points to some unknown piece of memory. */
6468 var_anything
= new_var_info (NULL_TREE
, "ANYTHING");
6469 gcc_assert (var_anything
->id
== anything_id
);
6470 var_anything
->is_artificial_var
= 1;
6471 var_anything
->size
= ~0;
6472 var_anything
->offset
= 0;
6473 var_anything
->fullsize
= ~0;
6474 var_anything
->is_special_var
= 1;
6476 /* Anything points to anything. This makes deref constraints just
6477 work in the presence of linked list and other p = *p type loops,
6478 by saying that *ANYTHING = ANYTHING. */
6480 lhs
.var
= anything_id
;
6482 rhs
.type
= ADDRESSOF
;
6483 rhs
.var
= anything_id
;
6486 /* This specifically does not use process_constraint because
6487 process_constraint ignores all anything = anything constraints, since all
6488 but this one are redundant. */
6489 constraints
.safe_push (new_constraint (lhs
, rhs
));
6491 /* Create the READONLY variable, used to represent that a variable
6492 points to readonly memory. */
6493 var_readonly
= new_var_info (NULL_TREE
, "READONLY");
6494 gcc_assert (var_readonly
->id
== readonly_id
);
6495 var_readonly
->is_artificial_var
= 1;
6496 var_readonly
->offset
= 0;
6497 var_readonly
->size
= ~0;
6498 var_readonly
->fullsize
= ~0;
6499 var_readonly
->is_special_var
= 1;
6501 /* readonly memory points to anything, in order to make deref
6502 easier. In reality, it points to anything the particular
6503 readonly variable can point to, but we don't track this
6506 lhs
.var
= readonly_id
;
6508 rhs
.type
= ADDRESSOF
;
6509 rhs
.var
= readonly_id
; /* FIXME */
6511 process_constraint (new_constraint (lhs
, rhs
));
6513 /* Create the ESCAPED variable, used to represent the set of escaped
6515 var_escaped
= new_var_info (NULL_TREE
, "ESCAPED");
6516 gcc_assert (var_escaped
->id
== escaped_id
);
6517 var_escaped
->is_artificial_var
= 1;
6518 var_escaped
->offset
= 0;
6519 var_escaped
->size
= ~0;
6520 var_escaped
->fullsize
= ~0;
6521 var_escaped
->is_special_var
= 0;
6523 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6525 var_nonlocal
= new_var_info (NULL_TREE
, "NONLOCAL");
6526 gcc_assert (var_nonlocal
->id
== nonlocal_id
);
6527 var_nonlocal
->is_artificial_var
= 1;
6528 var_nonlocal
->offset
= 0;
6529 var_nonlocal
->size
= ~0;
6530 var_nonlocal
->fullsize
= ~0;
6531 var_nonlocal
->is_special_var
= 1;
6533 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6535 lhs
.var
= escaped_id
;
6538 rhs
.var
= escaped_id
;
6540 process_constraint (new_constraint (lhs
, rhs
));
6542 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6543 whole variable escapes. */
6545 lhs
.var
= escaped_id
;
6548 rhs
.var
= escaped_id
;
6549 rhs
.offset
= UNKNOWN_OFFSET
;
6550 process_constraint (new_constraint (lhs
, rhs
));
6552 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6553 everything pointed to by escaped points to what global memory can
6556 lhs
.var
= escaped_id
;
6559 rhs
.var
= nonlocal_id
;
6561 process_constraint (new_constraint (lhs
, rhs
));
6563 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6564 global memory may point to global memory and escaped memory. */
6566 lhs
.var
= nonlocal_id
;
6568 rhs
.type
= ADDRESSOF
;
6569 rhs
.var
= nonlocal_id
;
6571 process_constraint (new_constraint (lhs
, rhs
));
6572 rhs
.type
= ADDRESSOF
;
6573 rhs
.var
= escaped_id
;
6575 process_constraint (new_constraint (lhs
, rhs
));
6577 /* Create the STOREDANYTHING variable, used to represent the set of
6578 variables stored to *ANYTHING. */
6579 var_storedanything
= new_var_info (NULL_TREE
, "STOREDANYTHING");
6580 gcc_assert (var_storedanything
->id
== storedanything_id
);
6581 var_storedanything
->is_artificial_var
= 1;
6582 var_storedanything
->offset
= 0;
6583 var_storedanything
->size
= ~0;
6584 var_storedanything
->fullsize
= ~0;
6585 var_storedanything
->is_special_var
= 0;
6587 /* Create the INTEGER variable, used to represent that a variable points
6588 to what an INTEGER "points to". */
6589 var_integer
= new_var_info (NULL_TREE
, "INTEGER");
6590 gcc_assert (var_integer
->id
== integer_id
);
6591 var_integer
->is_artificial_var
= 1;
6592 var_integer
->size
= ~0;
6593 var_integer
->fullsize
= ~0;
6594 var_integer
->offset
= 0;
6595 var_integer
->is_special_var
= 1;
6597 /* INTEGER = ANYTHING, because we don't know where a dereference of
6598 a random integer will point to. */
6600 lhs
.var
= integer_id
;
6602 rhs
.type
= ADDRESSOF
;
6603 rhs
.var
= anything_id
;
6605 process_constraint (new_constraint (lhs
, rhs
));
6608 /* Initialize things necessary to perform PTA */
6611 init_alias_vars (void)
6613 use_field_sensitive
= (MAX_FIELDS_FOR_FIELD_SENSITIVE
> 1);
6615 bitmap_obstack_initialize (&pta_obstack
);
6616 bitmap_obstack_initialize (&oldpta_obstack
);
6617 bitmap_obstack_initialize (&predbitmap_obstack
);
6619 constraint_pool
= create_alloc_pool ("Constraint pool",
6620 sizeof (struct constraint
), 30);
6621 variable_info_pool
= create_alloc_pool ("Variable info pool",
6622 sizeof (struct variable_info
), 30);
6623 constraints
.create (8);
6625 vi_for_tree
= pointer_map_create ();
6626 call_stmt_vars
= pointer_map_create ();
6628 memset (&stats
, 0, sizeof (stats
));
6629 shared_bitmap_table
.create (511);
6632 gcc_obstack_init (&fake_var_decl_obstack
);
6634 final_solutions
= pointer_map_create ();
6635 gcc_obstack_init (&final_solutions_obstack
);
6638 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6639 predecessor edges. */
6642 remove_preds_and_fake_succs (constraint_graph_t graph
)
6646 /* Clear the implicit ref and address nodes from the successor
6648 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
6650 if (graph
->succs
[i
])
6651 bitmap_clear_range (graph
->succs
[i
], FIRST_REF_NODE
,
6652 FIRST_REF_NODE
* 2);
6655 /* Free the successor list for the non-ref nodes. */
6656 for (i
= FIRST_REF_NODE
+ 1; i
< graph
->size
; i
++)
6658 if (graph
->succs
[i
])
6659 BITMAP_FREE (graph
->succs
[i
]);
6662 /* Now reallocate the size of the successor list as, and blow away
6663 the predecessor bitmaps. */
6664 graph
->size
= varmap
.length ();
6665 graph
->succs
= XRESIZEVEC (bitmap
, graph
->succs
, graph
->size
);
6667 free (graph
->implicit_preds
);
6668 graph
->implicit_preds
= NULL
;
6669 free (graph
->preds
);
6670 graph
->preds
= NULL
;
6671 bitmap_obstack_release (&predbitmap_obstack
);
6674 /* Solve the constraint set. */
6677 solve_constraints (void)
6679 struct scc_info
*si
;
6683 "\nCollapsing static cycles and doing variable "
6686 init_graph (varmap
.length () * 2);
6689 fprintf (dump_file
, "Building predecessor graph\n");
6690 build_pred_graph ();
6693 fprintf (dump_file
, "Detecting pointer and location "
6695 si
= perform_var_substitution (graph
);
6698 fprintf (dump_file
, "Rewriting constraints and unifying "
6700 rewrite_constraints (graph
, si
);
6702 build_succ_graph ();
6704 free_var_substitution_info (si
);
6706 /* Attach complex constraints to graph nodes. */
6707 move_complex_constraints (graph
);
6710 fprintf (dump_file
, "Uniting pointer but not location equivalent "
6712 unite_pointer_equivalences (graph
);
6715 fprintf (dump_file
, "Finding indirect cycles\n");
6716 find_indirect_cycles (graph
);
6718 /* Implicit nodes and predecessors are no longer necessary at this
6720 remove_preds_and_fake_succs (graph
);
6722 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6724 fprintf (dump_file
, "\n\n// The constraint graph before solve-graph "
6725 "in dot format:\n");
6726 dump_constraint_graph (dump_file
);
6727 fprintf (dump_file
, "\n\n");
6731 fprintf (dump_file
, "Solving graph\n");
6733 solve_graph (graph
);
6735 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6737 fprintf (dump_file
, "\n\n// The constraint graph after solve-graph "
6738 "in dot format:\n");
6739 dump_constraint_graph (dump_file
);
6740 fprintf (dump_file
, "\n\n");
6744 dump_sa_points_to_info (dump_file
);
6747 /* Create points-to sets for the current function. See the comments
6748 at the start of the file for an algorithmic overview. */
6751 compute_points_to_sets (void)
6757 timevar_push (TV_TREE_PTA
);
6761 intra_create_variable_infos ();
6763 /* Now walk all statements and build the constraint set. */
6766 gimple_stmt_iterator gsi
;
6768 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6770 gimple phi
= gsi_stmt (gsi
);
6772 if (! virtual_operand_p (gimple_phi_result (phi
)))
6773 find_func_aliases (phi
);
6776 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6778 gimple stmt
= gsi_stmt (gsi
);
6780 find_func_aliases (stmt
);
6786 fprintf (dump_file
, "Points-to analysis\n\nConstraints:\n\n");
6787 dump_constraints (dump_file
, 0);
6790 /* From the constraints compute the points-to sets. */
6791 solve_constraints ();
6793 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6794 cfun
->gimple_df
->escaped
= find_what_var_points_to (get_varinfo (escaped_id
));
6796 /* Make sure the ESCAPED solution (which is used as placeholder in
6797 other solutions) does not reference itself. This simplifies
6798 points-to solution queries. */
6799 cfun
->gimple_df
->escaped
.escaped
= 0;
6801 /* Mark escaped HEAP variables as global. */
6802 FOR_EACH_VEC_ELT (varmap
, i
, vi
)
6805 && !vi
->is_global_var
)
6806 DECL_EXTERNAL (vi
->decl
) = vi
->is_global_var
6807 = pt_solution_includes (&cfun
->gimple_df
->escaped
, vi
->decl
);
6809 /* Compute the points-to sets for pointer SSA_NAMEs. */
6810 for (i
= 0; i
< num_ssa_names
; ++i
)
6812 tree ptr
= ssa_name (i
);
6814 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
6815 find_what_p_points_to (ptr
);
6818 /* Compute the call-used/clobbered sets. */
6821 gimple_stmt_iterator gsi
;
6823 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6825 gimple stmt
= gsi_stmt (gsi
);
6826 struct pt_solution
*pt
;
6827 if (!is_gimple_call (stmt
))
6830 pt
= gimple_call_use_set (stmt
);
6831 if (gimple_call_flags (stmt
) & ECF_CONST
)
6832 memset (pt
, 0, sizeof (struct pt_solution
));
6833 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
6835 *pt
= find_what_var_points_to (vi
);
6836 /* Escaped (and thus nonlocal) variables are always
6837 implicitly used by calls. */
6838 /* ??? ESCAPED can be empty even though NONLOCAL
6845 /* If there is nothing special about this call then
6846 we have made everything that is used also escape. */
6847 *pt
= cfun
->gimple_df
->escaped
;
6851 pt
= gimple_call_clobber_set (stmt
);
6852 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
6853 memset (pt
, 0, sizeof (struct pt_solution
));
6854 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
6856 *pt
= find_what_var_points_to (vi
);
6857 /* Escaped (and thus nonlocal) variables are always
6858 implicitly clobbered by calls. */
6859 /* ??? ESCAPED can be empty even though NONLOCAL
6866 /* If there is nothing special about this call then
6867 we have made everything that is used also escape. */
6868 *pt
= cfun
->gimple_df
->escaped
;
6874 timevar_pop (TV_TREE_PTA
);
6878 /* Delete created points-to sets. */
6881 delete_points_to_sets (void)
6885 shared_bitmap_table
.dispose ();
6886 if (dump_file
&& (dump_flags
& TDF_STATS
))
6887 fprintf (dump_file
, "Points to sets created:%d\n",
6888 stats
.points_to_sets_created
);
6890 pointer_map_destroy (vi_for_tree
);
6891 pointer_map_destroy (call_stmt_vars
);
6892 bitmap_obstack_release (&pta_obstack
);
6893 constraints
.release ();
6895 for (i
= 0; i
< graph
->size
; i
++)
6896 graph
->complex[i
].release ();
6897 free (graph
->complex);
6900 free (graph
->succs
);
6902 free (graph
->pe_rep
);
6903 free (graph
->indirect_cycles
);
6907 free_alloc_pool (variable_info_pool
);
6908 free_alloc_pool (constraint_pool
);
6910 obstack_free (&fake_var_decl_obstack
, NULL
);
6912 pointer_map_destroy (final_solutions
);
6913 obstack_free (&final_solutions_obstack
, NULL
);
6917 /* Compute points-to information for every SSA_NAME pointer in the
6918 current function and compute the transitive closure of escaped
6919 variables to re-initialize the call-clobber states of local variables. */
6922 compute_may_aliases (void)
6924 if (cfun
->gimple_df
->ipa_pta
)
6928 fprintf (dump_file
, "\nNot re-computing points-to information "
6929 "because IPA points-to information is available.\n\n");
6931 /* But still dump what we have remaining it. */
6932 dump_alias_info (dump_file
);
6938 /* For each pointer P_i, determine the sets of variables that P_i may
6939 point-to. Compute the reachability set of escaped and call-used
6941 compute_points_to_sets ();
6943 /* Debugging dumps. */
6945 dump_alias_info (dump_file
);
6947 /* Deallocate memory used by aliasing data structures and the internal
6948 points-to solution. */
6949 delete_points_to_sets ();
6951 gcc_assert (!need_ssa_update_p (cfun
));
6957 gate_tree_pta (void)
6959 return flag_tree_pta
;
6962 /* A dummy pass to cause points-to information to be computed via
6963 TODO_rebuild_alias. */
6967 const pass_data pass_data_build_alias
=
6969 GIMPLE_PASS
, /* type */
6971 OPTGROUP_NONE
, /* optinfo_flags */
6972 true, /* has_gate */
6973 false, /* has_execute */
6974 TV_NONE
, /* tv_id */
6975 ( PROP_cfg
| PROP_ssa
), /* properties_required */
6976 0, /* properties_provided */
6977 0, /* properties_destroyed */
6978 0, /* todo_flags_start */
6979 TODO_rebuild_alias
, /* todo_flags_finish */
6982 class pass_build_alias
: public gimple_opt_pass
6985 pass_build_alias (gcc::context
*ctxt
)
6986 : gimple_opt_pass (pass_data_build_alias
, ctxt
)
6989 /* opt_pass methods: */
6990 bool gate () { return gate_tree_pta (); }
6992 }; // class pass_build_alias
6997 make_pass_build_alias (gcc::context
*ctxt
)
6999 return new pass_build_alias (ctxt
);
7002 /* A dummy pass to cause points-to information to be computed via
7003 TODO_rebuild_alias. */
7007 const pass_data pass_data_build_ealias
=
7009 GIMPLE_PASS
, /* type */
7010 "ealias", /* name */
7011 OPTGROUP_NONE
, /* optinfo_flags */
7012 true, /* has_gate */
7013 false, /* has_execute */
7014 TV_NONE
, /* tv_id */
7015 ( PROP_cfg
| PROP_ssa
), /* properties_required */
7016 0, /* properties_provided */
7017 0, /* properties_destroyed */
7018 0, /* todo_flags_start */
7019 TODO_rebuild_alias
, /* todo_flags_finish */
7022 class pass_build_ealias
: public gimple_opt_pass
7025 pass_build_ealias (gcc::context
*ctxt
)
7026 : gimple_opt_pass (pass_data_build_ealias
, ctxt
)
7029 /* opt_pass methods: */
7030 bool gate () { return gate_tree_pta (); }
7032 }; // class pass_build_ealias
7037 make_pass_build_ealias (gcc::context
*ctxt
)
7039 return new pass_build_ealias (ctxt
);
7043 /* Return true if we should execute IPA PTA. */
7049 /* Don't bother doing anything if the program has errors. */
7053 /* IPA PTA solutions for ESCAPED. */
7054 struct pt_solution ipa_escaped_pt
7055 = { true, false, false, false, false, false, NULL
};
7057 /* Associate node with varinfo DATA. Worker for
7058 cgraph_for_node_and_aliases. */
7060 associate_varinfo_to_alias (struct cgraph_node
*node
, void *data
)
7062 if ((node
->alias
|| node
->thunk
.thunk_p
)
7064 insert_vi_for_tree (node
->decl
, (varinfo_t
)data
);
7068 /* Execute the driver for IPA PTA. */
7070 ipa_pta_execute (void)
7072 struct cgraph_node
*node
;
7073 struct varpool_node
*var
;
7080 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7082 dump_symtab (dump_file
);
7083 fprintf (dump_file
, "\n");
7086 /* Build the constraints. */
7087 FOR_EACH_DEFINED_FUNCTION (node
)
7090 /* Nodes without a body are not interesting. Especially do not
7091 visit clones at this point for now - we get duplicate decls
7092 there for inline clones at least. */
7093 if (!cgraph_function_with_gimple_body_p (node
) || node
->clone_of
)
7095 cgraph_get_body (node
);
7097 gcc_assert (!node
->clone_of
);
7099 vi
= create_function_info_for (node
->decl
,
7100 alias_get_name (node
->decl
));
7101 cgraph_for_node_and_aliases (node
, associate_varinfo_to_alias
, vi
, true);
7104 /* Create constraints for global variables and their initializers. */
7105 FOR_EACH_VARIABLE (var
)
7107 if (var
->alias
&& var
->analyzed
)
7110 get_vi_for_tree (var
->decl
);
7116 "Generating constraints for global initializers\n\n");
7117 dump_constraints (dump_file
, 0);
7118 fprintf (dump_file
, "\n");
7120 from
= constraints
.length ();
7122 FOR_EACH_DEFINED_FUNCTION (node
)
7124 struct function
*func
;
7127 /* Nodes without a body are not interesting. */
7128 if (!cgraph_function_with_gimple_body_p (node
) || node
->clone_of
)
7134 "Generating constraints for %s", cgraph_node_name (node
));
7135 if (DECL_ASSEMBLER_NAME_SET_P (node
->decl
))
7136 fprintf (dump_file
, " (%s)",
7138 (DECL_ASSEMBLER_NAME (node
->decl
)));
7139 fprintf (dump_file
, "\n");
7142 func
= DECL_STRUCT_FUNCTION (node
->decl
);
7145 /* For externally visible or attribute used annotated functions use
7146 local constraints for their arguments.
7147 For local functions we see all callers and thus do not need initial
7148 constraints for parameters. */
7149 if (node
->used_from_other_partition
7150 || node
->externally_visible
7151 || node
->force_output
)
7153 intra_create_variable_infos ();
7155 /* We also need to make function return values escape. Nothing
7156 escapes by returning from main though. */
7157 if (!MAIN_NAME_P (DECL_NAME (node
->decl
)))
7160 fi
= lookup_vi_for_tree (node
->decl
);
7161 rvi
= first_vi_for_offset (fi
, fi_result
);
7162 if (rvi
&& rvi
->offset
== fi_result
)
7164 struct constraint_expr includes
;
7165 struct constraint_expr var
;
7166 includes
.var
= escaped_id
;
7167 includes
.offset
= 0;
7168 includes
.type
= SCALAR
;
7172 process_constraint (new_constraint (includes
, var
));
7177 /* Build constriants for the function body. */
7178 FOR_EACH_BB_FN (bb
, func
)
7180 gimple_stmt_iterator gsi
;
7182 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
7185 gimple phi
= gsi_stmt (gsi
);
7187 if (! virtual_operand_p (gimple_phi_result (phi
)))
7188 find_func_aliases (phi
);
7191 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
7193 gimple stmt
= gsi_stmt (gsi
);
7195 find_func_aliases (stmt
);
7196 find_func_clobbers (stmt
);
7204 fprintf (dump_file
, "\n");
7205 dump_constraints (dump_file
, from
);
7206 fprintf (dump_file
, "\n");
7208 from
= constraints
.length ();
7211 /* From the constraints compute the points-to sets. */
7212 solve_constraints ();
7214 /* Compute the global points-to sets for ESCAPED.
7215 ??? Note that the computed escape set is not correct
7216 for the whole unit as we fail to consider graph edges to
7217 externally visible functions. */
7218 ipa_escaped_pt
= find_what_var_points_to (get_varinfo (escaped_id
));
7220 /* Make sure the ESCAPED solution (which is used as placeholder in
7221 other solutions) does not reference itself. This simplifies
7222 points-to solution queries. */
7223 ipa_escaped_pt
.ipa_escaped
= 0;
7225 /* Assign the points-to sets to the SSA names in the unit. */
7226 FOR_EACH_DEFINED_FUNCTION (node
)
7229 struct function
*fn
;
7233 struct pt_solution uses
, clobbers
;
7234 struct cgraph_edge
*e
;
7236 /* Nodes without a body are not interesting. */
7237 if (!cgraph_function_with_gimple_body_p (node
) || node
->clone_of
)
7240 fn
= DECL_STRUCT_FUNCTION (node
->decl
);
7242 /* Compute the points-to sets for pointer SSA_NAMEs. */
7243 FOR_EACH_VEC_ELT (*fn
->gimple_df
->ssa_names
, i
, ptr
)
7246 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
7247 find_what_p_points_to (ptr
);
7250 /* Compute the call-use and call-clobber sets for all direct calls. */
7251 fi
= lookup_vi_for_tree (node
->decl
);
7252 gcc_assert (fi
->is_fn_info
);
7254 = find_what_var_points_to (first_vi_for_offset (fi
, fi_clobbers
));
7255 uses
= find_what_var_points_to (first_vi_for_offset (fi
, fi_uses
));
7256 for (e
= node
->callers
; e
; e
= e
->next_caller
)
7261 *gimple_call_clobber_set (e
->call_stmt
) = clobbers
;
7262 *gimple_call_use_set (e
->call_stmt
) = uses
;
7265 /* Compute the call-use and call-clobber sets for indirect calls
7266 and calls to external functions. */
7267 FOR_EACH_BB_FN (bb
, fn
)
7269 gimple_stmt_iterator gsi
;
7271 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
7273 gimple stmt
= gsi_stmt (gsi
);
7274 struct pt_solution
*pt
;
7278 if (!is_gimple_call (stmt
))
7281 /* Handle direct calls to external functions. */
7282 decl
= gimple_call_fndecl (stmt
);
7284 && (!(fi
= lookup_vi_for_tree (decl
))
7285 || !fi
->is_fn_info
))
7287 pt
= gimple_call_use_set (stmt
);
7288 if (gimple_call_flags (stmt
) & ECF_CONST
)
7289 memset (pt
, 0, sizeof (struct pt_solution
));
7290 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
7292 *pt
= find_what_var_points_to (vi
);
7293 /* Escaped (and thus nonlocal) variables are always
7294 implicitly used by calls. */
7295 /* ??? ESCAPED can be empty even though NONLOCAL
7298 pt
->ipa_escaped
= 1;
7302 /* If there is nothing special about this call then
7303 we have made everything that is used also escape. */
7304 *pt
= ipa_escaped_pt
;
7308 pt
= gimple_call_clobber_set (stmt
);
7309 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
7310 memset (pt
, 0, sizeof (struct pt_solution
));
7311 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
7313 *pt
= find_what_var_points_to (vi
);
7314 /* Escaped (and thus nonlocal) variables are always
7315 implicitly clobbered by calls. */
7316 /* ??? ESCAPED can be empty even though NONLOCAL
7319 pt
->ipa_escaped
= 1;
7323 /* If there is nothing special about this call then
7324 we have made everything that is used also escape. */
7325 *pt
= ipa_escaped_pt
;
7330 /* Handle indirect calls. */
7332 && (fi
= get_fi_for_callee (stmt
)))
7334 /* We need to accumulate all clobbers/uses of all possible
7336 fi
= get_varinfo (find (fi
->id
));
7337 /* If we cannot constrain the set of functions we'll end up
7338 calling we end up using/clobbering everything. */
7339 if (bitmap_bit_p (fi
->solution
, anything_id
)
7340 || bitmap_bit_p (fi
->solution
, nonlocal_id
)
7341 || bitmap_bit_p (fi
->solution
, escaped_id
))
7343 pt_solution_reset (gimple_call_clobber_set (stmt
));
7344 pt_solution_reset (gimple_call_use_set (stmt
));
7350 struct pt_solution
*uses
, *clobbers
;
7352 uses
= gimple_call_use_set (stmt
);
7353 clobbers
= gimple_call_clobber_set (stmt
);
7354 memset (uses
, 0, sizeof (struct pt_solution
));
7355 memset (clobbers
, 0, sizeof (struct pt_solution
));
7356 EXECUTE_IF_SET_IN_BITMAP (fi
->solution
, 0, i
, bi
)
7358 struct pt_solution sol
;
7360 vi
= get_varinfo (i
);
7361 if (!vi
->is_fn_info
)
7363 /* ??? We could be more precise here? */
7365 uses
->ipa_escaped
= 1;
7366 clobbers
->nonlocal
= 1;
7367 clobbers
->ipa_escaped
= 1;
7371 if (!uses
->anything
)
7373 sol
= find_what_var_points_to
7374 (first_vi_for_offset (vi
, fi_uses
));
7375 pt_solution_ior_into (uses
, &sol
);
7377 if (!clobbers
->anything
)
7379 sol
= find_what_var_points_to
7380 (first_vi_for_offset (vi
, fi_clobbers
));
7381 pt_solution_ior_into (clobbers
, &sol
);
7389 fn
->gimple_df
->ipa_pta
= true;
7392 delete_points_to_sets ();
7401 const pass_data pass_data_ipa_pta
=
7403 SIMPLE_IPA_PASS
, /* type */
7405 OPTGROUP_NONE
, /* optinfo_flags */
7406 true, /* has_gate */
7407 true, /* has_execute */
7408 TV_IPA_PTA
, /* tv_id */
7409 0, /* properties_required */
7410 0, /* properties_provided */
7411 0, /* properties_destroyed */
7412 0, /* todo_flags_start */
7413 TODO_update_ssa
, /* todo_flags_finish */
7416 class pass_ipa_pta
: public simple_ipa_opt_pass
7419 pass_ipa_pta (gcc::context
*ctxt
)
7420 : simple_ipa_opt_pass (pass_data_ipa_pta
, ctxt
)
7423 /* opt_pass methods: */
7424 bool gate () { return gate_ipa_pta (); }
7425 unsigned int execute () { return ipa_pta_execute (); }
7427 }; // class pass_ipa_pta
7431 simple_ipa_opt_pass
*
7432 make_pass_ipa_pta (gcc::context
*ctxt
)
7434 return new pass_ipa_pta (ctxt
);