]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-structalias.c
* cgraph.h (symtab_node_base): Add definition, alias and analyzed
[thirdparty/gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "tree-flow.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
34 #include "gimple.h"
35 #include "hash-table.h"
36 #include "function.h"
37 #include "cgraph.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
41 #include "params.h"
42 #include "cgraph.h"
43 #include "alias.h"
44 #include "pointer-set.h"
45
46 /* The idea behind this analyzer is to generate set constraints from the
47 program, then solve the resulting constraints in order to generate the
48 points-to sets.
49
50 Set constraints are a way of modeling program analysis problems that
51 involve sets. They consist of an inclusion constraint language,
52 describing the variables (each variable is a set) and operations that
53 are involved on the variables, and a set of rules that derive facts
54 from these operations. To solve a system of set constraints, you derive
55 all possible facts under the rules, which gives you the correct sets
56 as a consequence.
57
58 See "Efficient Field-sensitive pointer analysis for C" by "David
59 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60 http://citeseer.ist.psu.edu/pearce04efficient.html
61
62 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
63 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
64 http://citeseer.ist.psu.edu/heintze01ultrafast.html
65
66 There are three types of real constraint expressions, DEREF,
67 ADDRESSOF, and SCALAR. Each constraint expression consists
68 of a constraint type, a variable, and an offset.
69
70 SCALAR is a constraint expression type used to represent x, whether
71 it appears on the LHS or the RHS of a statement.
72 DEREF is a constraint expression type used to represent *x, whether
73 it appears on the LHS or the RHS of a statement.
74 ADDRESSOF is a constraint expression used to represent &x, whether
75 it appears on the LHS or the RHS of a statement.
76
77 Each pointer variable in the program is assigned an integer id, and
78 each field of a structure variable is assigned an integer id as well.
79
80 Structure variables are linked to their list of fields through a "next
81 field" in each variable that points to the next field in offset
82 order.
83 Each variable for a structure field has
84
85 1. "size", that tells the size in bits of that field.
86 2. "fullsize, that tells the size in bits of the entire structure.
87 3. "offset", that tells the offset in bits from the beginning of the
88 structure to this field.
89
90 Thus,
91 struct f
92 {
93 int a;
94 int b;
95 } foo;
96 int *bar;
97
98 looks like
99
100 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
103
104
105 In order to solve the system of set constraints, the following is
106 done:
107
108 1. Each constraint variable x has a solution set associated with it,
109 Sol(x).
110
111 2. Constraints are separated into direct, copy, and complex.
112 Direct constraints are ADDRESSOF constraints that require no extra
113 processing, such as P = &Q
114 Copy constraints are those of the form P = Q.
115 Complex constraints are all the constraints involving dereferences
116 and offsets (including offsetted copies).
117
118 3. All direct constraints of the form P = &Q are processed, such
119 that Q is added to Sol(P)
120
121 4. All complex constraints for a given constraint variable are stored in a
122 linked list attached to that variable's node.
123
124 5. A directed graph is built out of the copy constraints. Each
125 constraint variable is a node in the graph, and an edge from
126 Q to P is added for each copy constraint of the form P = Q
127
128 6. The graph is then walked, and solution sets are
129 propagated along the copy edges, such that an edge from Q to P
130 causes Sol(P) <- Sol(P) union Sol(Q).
131
132 7. As we visit each node, all complex constraints associated with
133 that node are processed by adding appropriate copy edges to the graph, or the
134 appropriate variables to the solution set.
135
136 8. The process of walking the graph is iterated until no solution
137 sets change.
138
139 Prior to walking the graph in steps 6 and 7, We perform static
140 cycle elimination on the constraint graph, as well
141 as off-line variable substitution.
142
143 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144 on and turned into anything), but isn't. You can just see what offset
145 inside the pointed-to struct it's going to access.
146
147 TODO: Constant bounded arrays can be handled as if they were structs of the
148 same number of elements.
149
150 TODO: Modeling heap and incoming pointers becomes much better if we
151 add fields to them as we discover them, which we could do.
152
153 TODO: We could handle unions, but to be honest, it's probably not
154 worth the pain or slowdown. */
155
156 /* IPA-PTA optimizations possible.
157
158 When the indirect function called is ANYTHING we can add disambiguation
159 based on the function signatures (or simply the parameter count which
160 is the varinfo size). We also do not need to consider functions that
161 do not have their address taken.
162
163 The is_global_var bit which marks escape points is overly conservative
164 in IPA mode. Split it to is_escape_point and is_global_var - only
165 externally visible globals are escape points in IPA mode. This is
166 also needed to fix the pt_solution_includes_global predicate
167 (and thus ptr_deref_may_alias_global_p).
168
169 The way we introduce DECL_PT_UID to avoid fixing up all points-to
170 sets in the translation unit when we copy a DECL during inlining
171 pessimizes precision. The advantage is that the DECL_PT_UID keeps
172 compile-time and memory usage overhead low - the points-to sets
173 do not grow or get unshared as they would during a fixup phase.
174 An alternative solution is to delay IPA PTA until after all
175 inlining transformations have been applied.
176
177 The way we propagate clobber/use information isn't optimized.
178 It should use a new complex constraint that properly filters
179 out local variables of the callee (though that would make
180 the sets invalid after inlining). OTOH we might as well
181 admit defeat to WHOPR and simply do all the clobber/use analysis
182 and propagation after PTA finished but before we threw away
183 points-to information for memory variables. WHOPR and PTA
184 do not play along well anyway - the whole constraint solving
185 would need to be done in WPA phase and it will be very interesting
186 to apply the results to local SSA names during LTRANS phase.
187
188 We probably should compute a per-function unit-ESCAPE solution
189 propagating it simply like the clobber / uses solutions. The
190 solution can go alongside the non-IPA espaced solution and be
191 used to query which vars escape the unit through a function.
192
193 We never put function decls in points-to sets so we do not
194 keep the set of called functions for indirect calls.
195
196 And probably more. */
197
198 static bool use_field_sensitive = true;
199 static int in_ipa_mode = 0;
200
201 /* Used for predecessor bitmaps. */
202 static bitmap_obstack predbitmap_obstack;
203
204 /* Used for points-to sets. */
205 static bitmap_obstack pta_obstack;
206
207 /* Used for oldsolution members of variables. */
208 static bitmap_obstack oldpta_obstack;
209
210 /* Used for per-solver-iteration bitmaps. */
211 static bitmap_obstack iteration_obstack;
212
213 static unsigned int create_variable_info_for (tree, const char *);
214 typedef struct constraint_graph *constraint_graph_t;
215 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
216
217 struct constraint;
218 typedef struct constraint *constraint_t;
219
220
/* Iterate like EXECUTE_IF_SET_IN_BITMAP over bitmap A starting at bit B,
   with bit index C and iterator D, but do nothing at all when A is NULL.
   NOTE: this expands to an unbraced `if', so the expansion can capture a
   following `else'; brace the surrounding code when in doubt.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
224
/* Counters collected while building and solving the constraint system,
   for statistics dumps.  */
static struct constraint_stats
{
  /* Total number of variable infos created (bumped in new_var_info).  */
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  /* Nodes unified before solving vs. during solving.  */
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  /* Solver worklist iterations.  */
  unsigned int iterations;
  /* Explicit and implicit graph edges added.  */
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
236
/* Per-constraint-variable information.  A structure variable is broken
   up into one variable_info per field; the fields are chained in offset
   order via NEXT, with HEAD naming the first field.  */
struct variable_info
{
  /* ID of this variable, i.e. its index into the varmap vector.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure (variable ID zero
     is reserved as the chain terminator).  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable, used in dumps.  */
  const char *name;

  /* Tree that this variable is associated with, or NULL_TREE for
     artificial variables.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable; allocated lazily, NULL until
     first needed (see new_var_info).  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
300
301 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
302 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
303 unsigned HOST_WIDE_INT);
304 static varinfo_t lookup_vi_for_tree (tree);
305 static inline bool type_can_have_subvars (const_tree);
306
307 /* Pool of variable info structures. */
308 static alloc_pool variable_info_pool;
309
310 /* Map varinfo to final pt_solution. */
311 static pointer_map_t *final_solutions;
312 struct obstack final_solutions_obstack;
313
314 /* Table of variable info structures for constraint variables.
315 Indexed directly by variable info id. */
316 static vec<varinfo_t> varmap;
317
/* Return the varmap element N.  N must be a valid variable id; no
   bounds checking is done beyond what the vec access itself performs.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
325
/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable.  (A NEXT id of zero marks
   the end of the chain; variable id zero is reserved for this.)  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}
334
/* Static IDs for the special artificial variables.  Variable ID zero is
   unused and used as terminator for the sub-variable chain.  These ids
   share the id space with ordinary variable infos (e.g. anything_id is
   tested directly against solution bitmaps in set_union_with_increment).  */
enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
340
/* Return a new variable info structure for a variable named NAME,
   associated with tree T (NULL_TREE for artificial variables), and
   append it to the vector of variable info structures.  The caller is
   expected to fill in offset/size/fullsize; the flag defaults chosen
   here assume a decl-less variable is artificial, global and without
   sub-variables.  */

static varinfo_t
new_var_info (tree t, const char *name)
{
  unsigned index = varmap.length ();
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  /* The old solution is allocated lazily, on first use.  */
  ret->oldsolution = NULL;
  ret->next = 0;
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}
381
382
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
struct pointer_map_t *call_stmt_vars;

/* Lookup or create the variable for the call statement CALL.  Two
   adjacent artificial full variables are created, "CALLUSED" at offset 0
   and "CALLCLOBBERED" at offset 1 (fullsize 2), with the clobber
   variable chained as the use variable's single sub-variable so that
   vi_next on the returned use variable reaches it.  Returns the use
   variable.  */

static varinfo_t
get_call_vi (gimple call)
{
  void **slot_p;
  varinfo_t vi, vi2;

  /* pointer_map_insert hands back the existing slot if CALL was seen
     before, so the pair is only created on first lookup.  */
  slot_p = pointer_map_insert (call_stmt_vars, call);
  if (*slot_p)
    return (varinfo_t) *slot_p;

  vi = new_var_info (NULL_TREE, "CALLUSED");
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;

  vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;

  /* Chain the clobber variable directly after the use variable.  */
  vi->next = vi2->id;

  *slot_p = (void *) vi;
  return vi;
}
416
417 /* Lookup the variable for the call statement CALL representing
418 the uses. Returns NULL if there is nothing special about this call. */
419
420 static varinfo_t
421 lookup_call_use_vi (gimple call)
422 {
423 void **slot_p;
424
425 slot_p = pointer_map_contains (call_stmt_vars, call);
426 if (slot_p)
427 return (varinfo_t) *slot_p;
428
429 return NULL;
430 }
431
432 /* Lookup the variable for the call statement CALL representing
433 the clobbers. Returns NULL if there is nothing special about this call. */
434
435 static varinfo_t
436 lookup_call_clobber_vi (gimple call)
437 {
438 varinfo_t uses = lookup_call_use_vi (call);
439 if (!uses)
440 return NULL;
441
442 return vi_next (uses);
443 }
444
/* Lookup or create the variable for the call statement CALL representing
   the uses.  Thin wrapper: the variable get_call_vi returns is the
   use variable.  */

static varinfo_t
get_call_use_vi (gimple call)
{
  return get_call_vi (call);
}
453
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  The clobber variable is chained directly after the use
   variable created by get_call_vi, hence the vi_next.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  return vi_next (get_call_vi (call));
}
462
463
/* The three kinds of constraint expression: x (SCALAR), *x (DEREF)
   and &x (ADDRESSOF); see the overview comment at the top of the
   file.  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint (a varinfo id).  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};
483
/* Use 0x8000... as special unknown offset.  Build the value by shifting
   in the unsigned domain and converting back: left-shifting a negative
   value, as the previous ((HOST_WIDE_INT)-1 << ...) form did, is
   undefined behavior in C and C++.  */
#define UNKNOWN_OFFSET \
  ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
486
487 typedef struct constraint_expr ce_s;
488 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
489 static void get_constraint_for (tree, vec<ce_s> *);
490 static void get_constraint_for_rhs (tree, vec<ce_s> *);
491 static void do_deref (vec<ce_s> *);
492
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables: a constraint LHS = RHS
   requires that whatever RHS evaluates to be included in what LHS
   evaluates to.  */
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};
504
505 /* List of constraints that we use to build the constraint graph from. */
506
507 static vec<constraint_t> constraints;
508 static alloc_pool constraint_pool;
509
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes, indexed by node id.  Ids below
   FIRST_REF_NODE are ordinary variables; during variable substitution
   additional "ref" nodes above FIRST_REF_NODE represent *x.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified (see find and unite, which maintain this as a
     union-find structure).  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  Kept sorted by constraint_less (see insert_into_complex).  */
  vec<constraint_t> *complex;
};
583
/* The single, global constraint graph the solver operates on.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  Node (i + FIRST_REF_NODE) is the "*i" node for variable i.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
592
593 /* Return the representative node for NODE, if NODE has been unioned
594 with another NODE.
595 This function performs path compression along the way to finding
596 the representative. */
597
598 static unsigned int
599 find (unsigned int node)
600 {
601 gcc_checking_assert (node < graph->size);
602 if (graph->rep[node] != node)
603 return graph->rep[node] = find (graph->rep[node]);
604 return node;
605 }
606
607 /* Union the TO and FROM nodes to the TO nodes.
608 Note that at some point in the future, we may want to do
609 union-by-rank, in which case we are going to have to return the
610 node we unified to. */
611
612 static bool
613 unite (unsigned int to, unsigned int from)
614 {
615 gcc_checking_assert (to < graph->size && from < graph->size);
616 if (to != from && graph->rep[from] != to)
617 {
618 graph->rep[from] = to;
619 return true;
620 }
621 return false;
622 }
623
624 /* Create a new constraint consisting of LHS and RHS expressions. */
625
626 static constraint_t
627 new_constraint (const struct constraint_expr lhs,
628 const struct constraint_expr rhs)
629 {
630 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
631 ret->lhs = lhs;
632 ret->rhs = rhs;
633 return ret;
634 }
635
636 /* Print out constraint C to FILE. */
637
638 static void
639 dump_constraint (FILE *file, constraint_t c)
640 {
641 if (c->lhs.type == ADDRESSOF)
642 fprintf (file, "&");
643 else if (c->lhs.type == DEREF)
644 fprintf (file, "*");
645 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
646 if (c->lhs.offset == UNKNOWN_OFFSET)
647 fprintf (file, " + UNKNOWN");
648 else if (c->lhs.offset != 0)
649 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
650 fprintf (file, " = ");
651 if (c->rhs.type == ADDRESSOF)
652 fprintf (file, "&");
653 else if (c->rhs.type == DEREF)
654 fprintf (file, "*");
655 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
656 if (c->rhs.offset == UNKNOWN_OFFSET)
657 fprintf (file, " + UNKNOWN");
658 else if (c->rhs.offset != 0)
659 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
660 }
661
662
663 void debug_constraint (constraint_t);
664 void debug_constraints (void);
665 void debug_constraint_graph (void);
666 void debug_solution_for_var (unsigned int);
667 void debug_sa_points_to_info (void);
668
669 /* Print out constraint C to stderr. */
670
671 DEBUG_FUNCTION void
672 debug_constraint (constraint_t c)
673 {
674 dump_constraint (stderr, c);
675 fprintf (stderr, "\n");
676 }
677
678 /* Print out all constraints to FILE */
679
680 static void
681 dump_constraints (FILE *file, int from)
682 {
683 int i;
684 constraint_t c;
685 for (i = from; constraints.iterate (i, &c); i++)
686 if (c)
687 {
688 dump_constraint (file, c);
689 fprintf (file, "\n");
690 }
691 }
692
/* Print out all constraints to stderr.  Convenience entry point
   intended for use from a debugger.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
700
/* Print the constraint graph in dot format to FILE.  Only union-find
   representatives are printed; node ids below FIRST_REF_NODE print as
   their variable name, ids above as "*name" of the dereferenced
   variable.  Complex constraints are attached as node labels.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Node 0 is unused, hence the
     iteration starts at 1; FIRST_REF_NODE itself (the "*" of the unused
     node 0) is skipped as well.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Only print representative nodes.  */
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* \N expands to the node name in dot; \l left-justifies each
	     constraint on its own label line.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after collapsing to representatives) are not
	     interesting.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
775
/* Print out the constraint graph to stderr.  Convenience entry point
   intended for use from a debugger.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
783
784 /* SOLVER FUNCTIONS
785
786 The solver is a simple worklist solver, that works on the following
787 algorithm:
788
789 sbitmap changed_nodes = all zeroes;
790 changed_count = 0;
791 For each node that is not already collapsed:
792 changed_count++;
793 set bit in changed nodes
794
795 while (changed_count > 0)
796 {
797 compute topological ordering for constraint graph
798
799 find and collapse cycles in the constraint graph (updating
800 changed if necessary)
801
802 for each node (n) in the graph in topological order:
803 changed_count--;
804
805 Process each complex constraint associated with the node,
806 updating changed if necessary.
807
808 For each outgoing edge from n, propagate the solution from n to
809 the destination of the edge, updating changed as necessary.
810
811 } */
812
813 /* Return true if two constraint expressions A and B are equal. */
814
815 static bool
816 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
817 {
818 return a.type == b.type && a.var == b.var && a.offset == b.offset;
819 }
820
821 /* Return true if constraint expression A is less than constraint expression
822 B. This is just arbitrary, but consistent, in order to give them an
823 ordering. */
824
825 static bool
826 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
827 {
828 if (a.type == b.type)
829 {
830 if (a.var == b.var)
831 return a.offset < b.offset;
832 else
833 return a.var < b.var;
834 }
835 else
836 return a.type < b.type;
837 }
838
839 /* Return true if constraint A is less than constraint B. This is just
840 arbitrary, but consistent, in order to give them an ordering. */
841
842 static bool
843 constraint_less (const constraint_t &a, const constraint_t &b)
844 {
845 if (constraint_expr_less (a->lhs, b->lhs))
846 return true;
847 else if (constraint_expr_less (b->lhs, a->lhs))
848 return false;
849 else
850 return constraint_expr_less (a->rhs, b->rhs);
851 }
852
853 /* Return true if two constraints A and B are equal. */
854
855 static bool
856 constraint_equal (struct constraint a, struct constraint b)
857 {
858 return constraint_expr_equal (a.lhs, b.lhs)
859 && constraint_expr_equal (a.rhs, b.rhs);
860 }
861
862
863 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
864
865 static constraint_t
866 constraint_vec_find (vec<constraint_t> vec,
867 struct constraint lookfor)
868 {
869 unsigned int place;
870 constraint_t found;
871
872 if (!vec.exists ())
873 return NULL;
874
875 place = vec.lower_bound (&lookfor, constraint_less);
876 if (place >= vec.length ())
877 return NULL;
878 found = vec[place];
879 if (!constraint_equal (*found, lookfor))
880 return NULL;
881 return found;
882 }
883
884 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
885
886 static void
887 constraint_set_union (vec<constraint_t> *to,
888 vec<constraint_t> *from)
889 {
890 int i;
891 constraint_t c;
892
893 FOR_EACH_VEC_ELT (*from, i, c)
894 {
895 if (constraint_vec_find (*to, *c) == NULL)
896 {
897 unsigned int place = to->lower_bound (c, constraint_less);
898 to->safe_insert (place, c);
899 }
900 }
901 }
902
/* Expands the solution in SET to all sub-fields of variables included.
   That is, for every field variable present, add every other field of
   the same base variable.  SET is modified in place (new bits set
   during iteration are at worst revisited harmlessly).  */

static void
solution_set_expand (bitmap set)
{
  bitmap_iterator bi;
  unsigned j;

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no siblings to add.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (set, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Only process head variables; every field chain is walked once.  */
      if (v->is_artificial_var
	  || v->is_full_var
	  || v->head != j)
	continue;
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (set, v->id);
    }
}
934
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  INC is a bit offset; the special value UNKNOWN_OFFSET
   instead expands FROM to all sub-fields.  Returns true if TO
   changed.  */

static bool
set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of FROM contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (from, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* For zero offset simply union the solution into the destination.  */
  if (inc == 0)
    return bitmap_ior_into (to, from);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_expand (tmp);
      changed |= bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.
	     (FIELDOFFSET > VI->OFFSET after adding a negative INC
	     means the unsigned addition wrapped below zero.)  */
	  if (inc < 0
	      && fieldoffset > vi->offset)
	    fieldoffset = 0;

	  vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  changed |= bitmap_set_bit (to, vi->id);
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
	  if (vi->offset != fieldoffset
	      && vi->next != 0)
	    changed |= bitmap_set_bit (to, vi->next);
	}
    }

  return changed;
}
1001
1002 /* Insert constraint C into the list of complex constraints for graph
1003 node VAR. */
1004
1005 static void
1006 insert_into_complex (constraint_graph_t graph,
1007 unsigned int var, constraint_t c)
1008 {
1009 vec<constraint_t> complex = graph->complex[var];
1010 unsigned int place = complex.lower_bound (c, constraint_less);
1011
1012 /* Only insert constraints that do not already exist. */
1013 if (place >= complex.length ()
1014 || !constraint_equal (*c, *complex[place]))
1015 graph->complex[var].safe_insert (place, c);
1016 }
1017
1018
1019 /* Condense two variable nodes into a single variable node, by moving
1020 all associated info from SRC to TO. */
1021
1022 static void
1023 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1024 unsigned int from)
1025 {
1026 unsigned int i;
1027 constraint_t c;
1028
1029 gcc_checking_assert (find (from) == to);
1030
1031 /* Move all complex constraints from src node into to node */
1032 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1033 {
1034 /* In complex constraints for node src, we may have either
1035 a = *src, and *src = a, or an offseted constraint which are
1036 always added to the rhs node's constraints. */
1037
1038 if (c->rhs.type == DEREF)
1039 c->rhs.var = to;
1040 else if (c->lhs.type == DEREF)
1041 c->lhs.var = to;
1042 else
1043 c->rhs.var = to;
1044 }
1045 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1046 graph->complex[from].release ();
1047 }
1048
1049
/* Remove edges involving NODE from GRAPH.  Only successor edges are
   stored explicitly at this point, so freeing the successor bitmap
   drops them all at once.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}
1058
/* Merge GRAPH nodes FROM and TO into node TO.  Moves indirect-cycle
   info and successor edges from FROM to TO, then removes FROM's
   edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  The TO bitmap is allocated lazily,
     so it may not exist yet.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM is now represented by TO; its own edges are dead.  */
  clear_edges_for_node (graph, from);
}
1088
1089
/* Add an implicit predecessor edge to GRAPH, recording FROM in the
   implicit-predecessor set of TO, if it doesn't exist in the graph
   already.  Self edges are never added.  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  if (to == from)
    return;

  /* Implicit-predecessor bitmaps are allocated lazily.  */
  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  /* bitmap_set_bit returns true only if the bit was newly set, so the
     statistic counts distinct edges.  */
  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
}
1106
/* Add a predecessor graph edge to GRAPH, recording FROM in the
   predecessor set of TO, if it doesn't exist in the graph already.
   Unlike add_graph_edge this does not report whether the edge was
   new.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Predecessor bitmaps are allocated lazily.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1119
1120 /* Add a graph edge to GRAPH, going from FROM to TO if
1121 it doesn't exist in the graph already.
1122 Return false if the edge already existed, true otherwise. */
1123
1124 static bool
1125 add_graph_edge (constraint_graph_t graph, unsigned int to,
1126 unsigned int from)
1127 {
1128 if (to == from)
1129 {
1130 return false;
1131 }
1132 else
1133 {
1134 bool r = false;
1135
1136 if (!graph->succs[from])
1137 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1138 if (bitmap_set_bit (graph->succs[from], to))
1139 {
1140 r = true;
1141 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1142 stats.num_edges++;
1143 }
1144 return r;
1145 }
1146 }
1147
1148
/* Initialize the constraint graph structure to contain SIZE nodes.
   Edge bitmaps are allocated lazily elsewhere; here we set up the
   per-node arrays and the union-find representatives.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  /* Every node starts as its own representative, with no pointer
     equivalence representative and no known indirect cycle.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
1175
/* Build the constraint graph, adding only predecessor edges right now.
   This graph (explicit and implicit predecessor edges plus the
   direct-node classification) feeds the offline variable substitution
   in condense_visit/label_visit.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* Initially every non-special node is assumed direct; the constraint
     walk below clears the bit when evidence appears otherwise.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into pred-graph edges.  REF nodes live
     at FIRST_REF_NODE + var and stand for the dereference *var.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* Offsetted loads cannot be modeled; x is no longer
	       direct.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  For a
	     multi-field variable, walk the whole field chain starting
	     at its head.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies: the node accessed at an offset cannot be
	     treated as direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1278
/* Build the constraint graph, adding successor edges.  This is the
   graph the online solver iterates over; variables are looked up
   through find () so edges connect current representatives.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been NULLed out by earlier rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y gets an edge via the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y gets an edge from the REF node of y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y seeds the initial solution directly; no edge.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* Plain copy x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1338
1339
/* Bitmap of variables whose solution changed on the last solver
   iteration; drives the propagation worklist.  */
static bitmap changed;
1342
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already entered by the DFS.  */
  sbitmap visited;
  /* Nodes whose SCC has been fully processed.  */
  sbitmap deleted;
  /* DFS discovery numbers, indexed by node id.  */
  unsigned int *dfs;
  /* Per-node representative mapping built by condense_visit.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of candidate SCC members (non-root nodes only).  */
  vec<unsigned> scc_stack;
};
1354
1355
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE are not graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      /* Work on representatives; skip already-finished SCCs.  */
      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number up to N.  W may
	 have been unified during the recursion, so re-find it.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  N is an SCC root iff
     its DFS number was not lowered by any successor.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every stacked node belonging to this SCC.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* The smallest-numbered member becomes the representative;
	     it must be a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record the cycle for online collapsing
		     instead of being unified directly.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* Not a root: keep N on the stack for its eventual SCC root.  */
    si->scc_stack.safe_push (n);
}
1443
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   union-find representative of FROM.  Merges edges, complex
   constraints and points-to solutions, then releases FROM's
   solution bitmaps.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solutions are dead once merged.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* TO's old solution is stale after the merge; drop it so it is
	 rebuilt, but only once iteration has started.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove any self edge the merge may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1495
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1506
1507
1508 /* Initialize and return a topological info structure. */
1509
1510 static struct topo_info *
1511 init_topo_info (void)
1512 {
1513 size_t size = graph->size;
1514 struct topo_info *ti = XNEW (struct topo_info);
1515 ti->visited = sbitmap_alloc (size);
1516 bitmap_clear (ti->visited);
1517 ti->topo_order.create (1);
1518 return ti;
1519 }
1520
1521
1522 /* Free the topological sort info pointed to by TI. */
1523
1524 static void
1525 free_topo_info (struct topo_info *ti)
1526 {
1527 sbitmap_free (ti->visited);
1528 ti->topo_order.release ();
1529 free (ti);
1530 }
1531
/* Visit the graph in topological order starting at node N, and store
   the order in the topo_info structure TI.  Post-order DFS: a node is
   pushed only after all its successors, so TI->topo_order ends up in
   reverse topological order.  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (ti->visited, n);

  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!bitmap_bit_p (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  ti->topo_order.safe_push (n);
}
1553
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  Unions the solutions of everything DELTA
   may point to (at the given offset) into the solution of x, adding
   copy edges so future changes propagate.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1641
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  Unions the solution of y into the
   solution of everything DELTA may point to (at the given offset),
   adding copy edges so future changes propagate, and records escapes
   through global variables.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      /* Special vars never receive new solution bits.  */
	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
1739
/* Handle a non-simple (simple meaning requires no iteration),
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).
   Dispatches to do_ds_constraint / do_sd_constraint for the DEREF
   forms, and performs an offsetted copy otherwise.  DELTA is the
   current solution of the constraint's pointer operand.  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  /* *x = &y is rewritten away before solving.  */
	  gcc_unreachable();
	}
      else
	{
	  /* *x = y */
	  do_ds_constraint (c, delta);
	}
    }
  else if (c->rhs.type == DEREF)
    {
      /* x = *y */
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta);
    }
  else
    {
      bitmap tmp;
      bitmap solution;
      bool flag = false;

      /* Offsetted copy x = y + off: union y's (possibly offsetted)
	 solution into x's.  */
      gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
      solution = get_varinfo (c->rhs.var)->solution;
      tmp = get_varinfo (c->lhs.var)->solution;

      flag = set_union_with_increment (tmp, solution, c->rhs.offset);

      if (flag)
	bitmap_set_bit (changed, c->lhs.var);
    }
}
1780
1781 /* Initialize and return a new SCC info structure. */
1782
1783 static struct scc_info *
1784 init_scc_info (size_t size)
1785 {
1786 struct scc_info *si = XNEW (struct scc_info);
1787 size_t i;
1788
1789 si->current_index = 0;
1790 si->visited = sbitmap_alloc (size);
1791 bitmap_clear (si->visited);
1792 si->deleted = sbitmap_alloc (size);
1793 bitmap_clear (si->deleted);
1794 si->node_mapping = XNEWVEC (unsigned int, size);
1795 si->dfs = XCNEWVEC (unsigned int, size);
1796
1797 for (i = 0; i < size; i++)
1798 si->node_mapping[i] = i;
1799
1800 si->scc_stack.create (1);
1801 return si;
1802 }
1803
1804 /* Free an SCC info structure pointed to by SI */
1805
1806 static void
1807 free_scc_info (struct scc_info *si)
1808 {
1809 sbitmap_free (si->visited);
1810 sbitmap_free (si->deleted);
1811 free (si->node_mapping);
1812 free (si->dfs);
1813 si->scc_stack.release ();
1814 free (si);
1815 }
1816
1817
/* Find indirect cycles in GRAPH that occur, using strongly connected
   components, and note them in the indirect cycles map.

   This technique comes from Ben Hardekopf and Calvin Lin,
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
   Lines of Code", submitted to PLDI 2007.  */

static void
find_indirect_cycles (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  /* Start a DFS from every unvisited representative node below the
     REF-node range.  */
  for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
    if (!bitmap_bit_p (si->visited, i) && find (i) == i)
      scc_visit (graph, si, i);

  free_scc_info (si);
}
1838
1839 /* Compute a topological ordering for GRAPH, and store the result in the
1840 topo_info structure TI. */
1841
1842 static void
1843 compute_topo_order (constraint_graph_t graph,
1844 struct topo_info *ti)
1845 {
1846 unsigned int i;
1847 unsigned int size = graph->size;
1848
1849 for (i = 0; i != size; ++i)
1850 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1851 topo_visit (graph, ti, i);
1852 }
1853
/* Structure used for hash value numbering of pointer equivalence
   classes: maps a bitmap of labels to its assigned class id.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS, used as the hash-table key hash.  */
  hashval_t hashcode;
  /* The equivalence class id assigned to LABELS (0 = unassigned).  */
  unsigned int equivalence_class;
  /* The label bitmap this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1864
/* Equiv_class_label hashtable helpers.  Entries are malloc'd, hence
   typed_free_remove.  */

struct equiv_class_hasher : typed_free_remove <equiv_class_label>
{
  typedef equiv_class_label value_type;
  typedef equiv_class_label compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
1874
/* Hash function for a equiv_class_label_t.  Returns the hash cached
   at entry-creation time.  */

inline hashval_t
equiv_class_hasher::hash (const value_type *ecl)
{
  return ecl->hashcode;
}
1882
/* Equality function for two equiv_class_label_t's.  Compares the
   cached hashes first as a cheap reject before the bitmap compare.  */

inline bool
equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)
{
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
}
1891
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table <equiv_class_hasher> pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table <equiv_class_hasher> location_equiv_class_table;
1899
/* Look up the equivalence-class entry for the bitmap LABELS in TABLE,
   creating a fresh entry with equivalence_class zero if none exists
   yet.  Returns the (possibly new) entry; the caller assigns the
   class id.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  /* Probe with a stack-allocated key; only allocate on a miss.  */
  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
  if (!*slot)
    {
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1923
1924 /* Perform offline variable substitution.
1925
1926 This is a worst case quadratic time way of identifying variables
1927 that must have equivalent points-to sets, including those caused by
1928 static cycles, and single entry subgraphs, in the constraint graph.
1929
1930 The technique is described in "Exploiting Pointer and Location
1931 Equivalence to Optimize Pointer Analysis. In the 14th International
1932 Static Analysis Symposium (SAS), August 2007." It is known as the
1933 "HU" algorithm, and is equivalent to value numbering the collapsed
1934 constraint graph including evaluating unions.
1935
1936 The general method of finding equivalence classes is as follows:
1937 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1938 Initialize all non-REF nodes to be direct nodes.
1939 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1940 variable}
1941 For each constraint containing the dereference, we also do the same
1942 thing.
1943
1944 We then compute SCC's in the graph and unify nodes in the same SCC,
1945 including pts sets.
1946
1947 For each non-collapsed node x:
1948 Visit all unvisited explicit incoming edges.
   Ignoring all non-pointers, set pts(x) = Union of pts(y) for each y
   where y->x.
1951 Lookup the equivalence class for pts(x).
1952 If we found one, equivalence_class(x) = found class.
1953 Otherwise, equivalence_class(x) = new class, and new_class is
1954 added to the lookup table.
1955
1956 All direct nodes with the same equivalence class can be replaced
1957 with a single representative node.
1958 All unlabeled nodes (label == 0) are not pointers and all edges
1959 involving them can be eliminated.
1960 We perform these optimizations during rewrite_constraints
1961
1962 In addition to pointer equivalence class finding, we also perform
1963 location equivalence class finding. This is the set of variables
1964 that always appear together in points-to sets. We use this to
1965 compress the size of the points-to sets. */
1966
/* Current maximum pointer equivalence class id; next class to be
   handed out by label_visit.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
1972
/* Recursive routine to find strongly connected components in GRAPH,
   and label it's nodes with DFS numbers.  Runs Tarjan/Nuutila over
   the predecessor graph (explicit and implicit edges) and collapses
   each SCC into its root via si->node_mapping, merging the per-node
   pred/points-to bitmaps.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors (the explicit predecessor edges serve as
     the traversal edges here).  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number up to N; W's
	 mapping may have changed during the recursion.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  N is an SCC root iff
     its DFS number was not lowered above.  */
  if (si->dfs[n] == my_dfs)
    {
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* An indirect member makes the whole collapsed node
	     indirect.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* Not a root: keep N on the stack for its eventual SCC root.  */
    si->scc_stack.safe_push (n);
}
2061
/* Label pointer equivalences.  Visits node N (after its predecessors),
   unions the points-to sets flowing in over explicit predecessor
   edges, and assigns N a pointer equivalence class: a fresh one for
   indirect nodes, a shared one (via pointer_equiv_class_table) for
   nodes with equal points-to sets.  Label 0 means "not a pointer".  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  /* Defer allocating N's own bitmap until a second non-empty
	     predecessor shows up; until then remember the first one.  */
	  if (!graph->points_to[n])
	    {
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh variable is modeled by N's own REF node.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* Share the canonical bitmap of an existing class.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2148
/* Print the pred graph in dot format.  SI provides the node mapping
   so that collapsed (non-representative) nodes are omitted; output
   goes to FILE.  Uses the file-scope GRAPH.  */

static void
dump_pred_graph (struct scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (si->node_mapping[i] != i)
	continue;
      /* Nodes >= FIRST_REF_NODE stand for "*var" of the
	 corresponding variable.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	{
	  fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
2219
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.

   Returns the scc_info holding the node mapping; the caller is
   responsible for releasing it (see free_var_substitution_info).
   Initializes the file-scope pointer/location equivalence tables and
   counters as a side effect.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table.create (511);
  location_equiv_class_table.create (511);
  /* Label 0 is reserved to mean "non-pointer", so real classes start
     at 1.  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  /* Re-use the visited bitmap for the second DFS pass.  */
  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* An existing class owns an identical bitmap; ours is
	     redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 means the variable can never hold a pointer.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2351
/* Free information that was only necessary for variable
   substitution.  Releases SI, the per-node substitution arrays of the
   file-scope GRAPH, both equivalence-class tables, and the iteration
   obstack set up by perform_var_substitution.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  pointer_equiv_class_table.dispose ();
  location_equiv_class_table.dispose ();
  bitmap_obstack_release (&iteration_obstack);
}
2369
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.

   As a side effect this may unify NODE with the class representative
   (when NODE's address is never taken), or record NODE as the
   representative if the class had none yet.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_checking_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
	}
      else
	{
	  /* First member of this class; make NODE its representative.  */
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken: only record the pointer-equivalence label so
	 unite_pointer_equivalences can merge later.  */
      gcc_checking_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
2409
2410 /* Unite pointer equivalent but not location equivalent nodes in
2411 GRAPH. This may only be performed once variable substitution is
2412 finished. */
2413
2414 static void
2415 unite_pointer_equivalences (constraint_graph_t graph)
2416 {
2417 unsigned int i;
2418
2419 /* Go through the pointer equivalences and unite them to their
2420 representative, if they aren't already. */
2421 for (i = 1; i < FIRST_REF_NODE; i++)
2422 {
2423 unsigned int label = graph->pe[i];
2424 if (label)
2425 {
2426 int label_rep = graph->pe_rep[label];
2427
2428 if (label_rep == -1)
2429 continue;
2430
2431 label_rep = find (label_rep);
2432 if (label_rep >= 0 && unite (label_rep, find (i)))
2433 unify_nodes (graph, label_rep, i, false);
2434 }
2435 }
2436 }
2437
2438 /* Move complex constraints to the GRAPH nodes they belong to. */
2439
2440 static void
2441 move_complex_constraints (constraint_graph_t graph)
2442 {
2443 int i;
2444 constraint_t c;
2445
2446 FOR_EACH_VEC_ELT (constraints, i, c)
2447 {
2448 if (c)
2449 {
2450 struct constraint_expr lhs = c->lhs;
2451 struct constraint_expr rhs = c->rhs;
2452
2453 if (lhs.type == DEREF)
2454 {
2455 insert_into_complex (graph, lhs.var, c);
2456 }
2457 else if (rhs.type == DEREF)
2458 {
2459 if (!(get_varinfo (lhs.var)->is_special_var))
2460 insert_into_complex (graph, rhs.var, c);
2461 }
2462 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2463 && (lhs.offset != 0 || rhs.offset != 0))
2464 {
2465 insert_into_complex (graph, rhs.var, c);
2466 }
2467 }
2468 }
2469 }
2470
2471
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.

   Constraints whose lhs or rhs is a proven non-pointer (label 0) are
   dropped (nulled out in the constraints vector); the remaining
   constraints have both sides replaced by their equivalent-node
   representatives.  */

static void
rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
{
  int i;
  constraint_t c;

#ifdef ENABLE_CHECKING
  /* Every node must be its own union-find representative here.  */
  for (unsigned int j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);
#endif

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      /* NOTE(review): the adjacent string literals concatenate
		 without a space ("...variable,ignoring...").  */
	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Rewrite both sides to their equivalent-node representatives.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2540
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.

   Uses the cycle representative recorded in graph->indirect_cycles
   and unites every representative member of NODE's solution into it.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      vec<unsigned> queue = vNULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      /* First pass: only record which nodes to merge...  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	}

      /* ... second pass: actually merge their solutions.  */
      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      queue.release ();
      return true;
    }
  return false;
}
2580
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      /* Only seed nodes with a non-empty solution that have outgoing
	 work (successors or complex constraints).  */
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the delta of changed
     solution bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in topological order so sources are handled
	 before their sinks within one iteration.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{

	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't have ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* Propagate only the bits new since last time.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember what we have propagated so far.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Re-queue the successor if its solution grew.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2737
/* Map from trees to variable infos.  Populated by insert_vi_for_tree
   and queried by lookup_vi_for_tree / get_vi_for_tree.  */
static struct pointer_map_t *vi_for_tree;
2740
2741
2742 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2743
2744 static void
2745 insert_vi_for_tree (tree t, varinfo_t vi)
2746 {
2747 void **slot = pointer_map_insert (vi_for_tree, t);
2748 gcc_assert (vi);
2749 gcc_assert (*slot == NULL);
2750 *slot = vi;
2751 }
2752
2753 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2754 exist in the map, return NULL, otherwise, return the varinfo we found. */
2755
2756 static varinfo_t
2757 lookup_vi_for_tree (tree t)
2758 {
2759 void **slot = pointer_map_contains (vi_for_tree, t);
2760 if (slot == NULL)
2761 return NULL;
2762
2763 return (varinfo_t) *slot;
2764 }
2765
/* Return a printable name for DECL.

   Only computes a real name when dumping is active; otherwise (and on
   any failure) returns the literal string "NULL".  Names built with
   asprintf are copied into GC memory via ggc_strdup, so the returned
   pointer remains valid.  */

static const char *
alias_get_name (tree decl)
{
  const char *res = NULL;
  char *temp;
  int num_printed = 0;

  /* Avoid the allocation work entirely when nobody will read it.  */
  if (!dump_file)
    return "NULL";

  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* SSA names are printed as "base_version" or "_version".  */
      res = get_name (decl);
      if (res)
	num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
      else
	num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
      if (num_printed > 0)
	{
	  res = ggc_strdup (temp);
	  free (temp);
	}
      /* NOTE(review): if asprintf fails here, res may still hold the
	 bare base name without the version suffix.  */
    }
  else if (DECL_P (decl))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (decl))
	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      else
	{
	  res = get_name (decl);
	  if (!res)
	    {
	      /* Anonymous decls are named by their UID.  */
	      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
	      if (num_printed > 0)
		{
		  res = ggc_strdup (temp);
		  free (temp);
		}
	    }
	}
    }
  if (res != NULL)
    return res;

  return "NULL";
}
2814
2815 /* Find the variable id for tree T in the map.
2816 If T doesn't exist in the map, create an entry for it and return it. */
2817
2818 static varinfo_t
2819 get_vi_for_tree (tree t)
2820 {
2821 void **slot = pointer_map_contains (vi_for_tree, t);
2822 if (slot == NULL)
2823 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2824
2825 return (varinfo_t) *slot;
2826 }
2827
2828 /* Get a scalar constraint expression for a new temporary variable. */
2829
2830 static struct constraint_expr
2831 new_scalar_tmp_constraint_exp (const char *name)
2832 {
2833 struct constraint_expr tmp;
2834 varinfo_t vi;
2835
2836 vi = new_var_info (NULL_TREE, name);
2837 vi->offset = 0;
2838 vi->size = -1;
2839 vi->fullsize = -1;
2840 vi->is_full_var = 1;
2841
2842 tmp.var = vi->id;
2843 tmp.type = SCALAR;
2844 tmp.offset = 0;
2845
2846 return tmp;
2847 }
2848
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.
   Pushes the resulting constraint expressions onto RESULTS.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->symbol.alias && node->symbol.analyzed)
	{
	  node = varpool_variable_node (node, NULL);
	  t = node->symbol.decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      /* NOTE(review): the two statements after gcc_unreachable are
	 dead code.  */
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
2912
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.

   Constraints involving non-pointer variables are discarded;
   double-dereference and address-of-dereference forms are split into
   two constraints through a fresh temporary (recursively processed).  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Plain constraints are stored directly; ADDRESSOF may not
	 carry an offset.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
2968
2969
2970 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2971 structure. */
2972
2973 static HOST_WIDE_INT
2974 bitpos_of_field (const tree fdecl)
2975 {
2976 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2977 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2978 return -1;
2979
2980 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2981 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2982 }
2983
2984
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.

   OFFSET is a byte offset tree (may be NULL_TREE or non-constant, in
   which case the conservative UNKNOWN_OFFSET is used).  For ADDRESSOF
   results the pointed-to sub-field is resolved here; for other results
   the bit offset is simply recorded.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
			   .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.low;
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	c.offset = 0;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Conservatively add every sub-field of the variable.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ??? If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;
	  temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      && temp->next != 0)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->next;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	    }
	  c.var = temp->id;
	  c.offset = 0;
	}
      else
	/* Non-ADDRESSOF results just carry the bit offset along.  */
	c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
3103
3104
3105 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3106 If address_p is true the result will be taken its address of.
3107 If lhs_p is true then the constraint expression is assumed to be used
3108 as the lhs. */
3109
3110 static void
3111 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3112 bool address_p, bool lhs_p)
3113 {
3114 tree orig_t = t;
3115 HOST_WIDE_INT bitsize = -1;
3116 HOST_WIDE_INT bitmaxsize = -1;
3117 HOST_WIDE_INT bitpos;
3118 tree forzero;
3119
3120 /* Some people like to do cute things like take the address of
3121 &0->a.b */
3122 forzero = t;
3123 while (handled_component_p (forzero)
3124 || INDIRECT_REF_P (forzero)
3125 || TREE_CODE (forzero) == MEM_REF)
3126 forzero = TREE_OPERAND (forzero, 0);
3127
3128 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3129 {
3130 struct constraint_expr temp;
3131
3132 temp.offset = 0;
3133 temp.var = integer_id;
3134 temp.type = SCALAR;
3135 results->safe_push (temp);
3136 return;
3137 }
3138
3139 /* Handle type-punning through unions. If we are extracting a pointer
3140 from a union via a possibly type-punning access that pointer
3141 points to anything, similar to a conversion of an integer to
3142 a pointer. */
3143 if (!lhs_p)
3144 {
3145 tree u;
3146 for (u = t;
3147 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3148 u = TREE_OPERAND (u, 0))
3149 if (TREE_CODE (u) == COMPONENT_REF
3150 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3151 {
3152 struct constraint_expr temp;
3153
3154 temp.offset = 0;
3155 temp.var = anything_id;
3156 temp.type = ADDRESSOF;
3157 results->safe_push (temp);
3158 return;
3159 }
3160 }
3161
3162 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3163
3164 /* Pretend to take the address of the base, we'll take care of
3165 adding the required subset of sub-fields below. */
3166 get_constraint_for_1 (t, results, true, lhs_p);
3167 gcc_assert (results->length () == 1);
3168 struct constraint_expr &result = results->last ();
3169
3170 if (result.type == SCALAR
3171 && get_varinfo (result.var)->is_full_var)
3172 /* For single-field vars do not bother about the offset. */
3173 result.offset = 0;
3174 else if (result.type == SCALAR)
3175 {
3176 /* In languages like C, you can access one past the end of an
3177 array. You aren't allowed to dereference it, so we can
3178 ignore this constraint. When we handle pointer subtraction,
3179 we may have to do something cute here. */
3180
3181 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3182 && bitmaxsize != 0)
3183 {
3184 /* It's also not true that the constraint will actually start at the
3185 right offset, it may start in some padding. We only care about
3186 setting the constraint to the first actual field it touches, so
3187 walk to find it. */
3188 struct constraint_expr cexpr = result;
3189 varinfo_t curr;
3190 results->pop ();
3191 cexpr.offset = 0;
3192 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3193 {
3194 if (ranges_overlap_p (curr->offset, curr->size,
3195 bitpos, bitmaxsize))
3196 {
3197 cexpr.var = curr->id;
3198 results->safe_push (cexpr);
3199 if (address_p)
3200 break;
3201 }
3202 }
3203 /* If we are going to take the address of this field then
3204 to be able to compute reachability correctly add at least
3205 the last field of the variable. */
3206 if (address_p && results->length () == 0)
3207 {
3208 curr = get_varinfo (cexpr.var);
3209 while (curr->next != 0)
3210 curr = vi_next (curr);
3211 cexpr.var = curr->id;
3212 results->safe_push (cexpr);
3213 }
3214 else if (results->length () == 0)
3215 /* Assert that we found *some* field there. The user couldn't be
3216 accessing *only* padding. */
3217 /* Still the user could access one past the end of an array
3218 embedded in a struct resulting in accessing *only* padding. */
3219 /* Or accessing only padding via type-punning to a type
3220 that has a field just in padding space. */
3221 {
3222 cexpr.type = SCALAR;
3223 cexpr.var = anything_id;
3224 cexpr.offset = 0;
3225 results->safe_push (cexpr);
3226 }
3227 }
3228 else if (bitmaxsize == 0)
3229 {
3230 if (dump_file && (dump_flags & TDF_DETAILS))
3231 fprintf (dump_file, "Access to zero-sized part of variable,"
3232 "ignoring\n");
3233 }
3234 else
3235 if (dump_file && (dump_flags & TDF_DETAILS))
3236 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3237 }
3238 else if (result.type == DEREF)
3239 {
3240 /* If we do not know exactly where the access goes say so. Note
3241 that only for non-structure accesses we know that we access
3242 at most one subfield of any variable. */
3243 if (bitpos == -1
3244 || bitsize != bitmaxsize
3245 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3246 || result.offset == UNKNOWN_OFFSET)
3247 result.offset = UNKNOWN_OFFSET;
3248 else
3249 result.offset += bitpos;
3250 }
3251 else if (result.type == ADDRESSOF)
3252 {
3253 /* We can end up here for component references on a
3254 VIEW_CONVERT_EXPR <>(&foobar). */
3255 result.type = SCALAR;
3256 result.var = anything_id;
3257 result.offset = 0;
3258 }
3259 else
3260 gcc_unreachable ();
3261 }
3262
3263
3264 /* Dereference the constraint expression CONS, and return the result.
3265 DEREF (ADDRESSOF) = SCALAR
3266 DEREF (SCALAR) = DEREF
3267 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3268 This is needed so that we can handle dereferencing DEREF constraints. */
3269
3270 static void
3271 do_deref (vec<ce_s> *constraints)
3272 {
3273 struct constraint_expr *c;
3274 unsigned int i = 0;
3275
3276 FOR_EACH_VEC_ELT (*constraints, i, c)
3277 {
3278 if (c->type == SCALAR)
3279 c->type = DEREF;
3280 else if (c->type == ADDRESSOF)
3281 c->type = SCALAR;
3282 else if (c->type == DEREF)
3283 {
3284 struct constraint_expr tmplhs;
3285 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3286 process_constraint (new_constraint (tmplhs, *c));
3287 c->var = tmplhs.var;
3288 }
3289 else
3290 gcc_unreachable ();
3291 }
3292 }
3293
3294 /* Given a tree T, return the constraint expression for taking the
3295 address of it. */
3296
3297 static void
3298 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3299 {
3300 struct constraint_expr *c;
3301 unsigned int i;
3302
3303 get_constraint_for_1 (t, results, true, true);
3304
3305 FOR_EACH_VEC_ELT (*results, i, c)
3306 {
3307 if (c->type == DEREF)
3308 c->type = SCALAR;
3309 else
3310 c->type = ADDRESSOF;
3311 }
3312 }
3313
/* Given a tree T, return the constraint expression for it.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class of T.  Cases that cannot be
     handled precisely fall through to the ADDRESSOF-anything
     constraint at the bottom.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  /* Push every subvariable whose start falls inside the
		     accessed region; subvariables are ordered by offset,
		     so stop at the first one past the end.  */
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      vec<ce_s> tmp = vNULL;
	      /* Collect the union of the constraints of all elements.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3498
3499 /* Given a gimple tree T, return the constraint expression vector for it. */
3500
3501 static void
3502 get_constraint_for (tree t, vec<ce_s> *results)
3503 {
3504 gcc_assert (results->length () == 0);
3505
3506 get_constraint_for_1 (t, results, false, true);
3507 }
3508
3509 /* Given a gimple tree T, return the constraint expression vector for it
3510 to be used as the rhs of a constraint. */
3511
3512 static void
3513 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3514 {
3515 gcc_assert (results->length () == 0);
3516
3517 get_constraint_for_1 (t, results, false, false);
3518 }
3519
3520
3521 /* Efficiently generates constraints from all entries in *RHSC to all
3522 entries in *LHSC. */
3523
3524 static void
3525 process_all_all_constraints (vec<ce_s> lhsc,
3526 vec<ce_s> rhsc)
3527 {
3528 struct constraint_expr *lhsp, *rhsp;
3529 unsigned i, j;
3530
3531 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3532 {
3533 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3534 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3535 process_constraint (new_constraint (*lhsp, *rhsp));
3536 }
3537 else
3538 {
3539 struct constraint_expr tmp;
3540 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3541 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3542 process_constraint (new_constraint (tmp, *rhsp));
3543 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3544 process_constraint (new_constraint (*lhsp, tmp));
3545 }
3546 }
3547
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      /* If either side is indirect we cannot match fields one by one;
	 use an unknown offset and connect everything to everything.  */
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      /* Walk the two field lists in lock-step: J indexes lhs fields,
	 K indexes rhs fields, copying between fields whose adjusted
	 ranges overlap.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the rhs field when the current lhs field extends
	     past it, otherwise advance the lhs field.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3619
3620 /* Create constraints ID = { rhsc }. */
3621
3622 static void
3623 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3624 {
3625 struct constraint_expr *c;
3626 struct constraint_expr includes;
3627 unsigned int j;
3628
3629 includes.var = id;
3630 includes.offset = 0;
3631 includes.type = SCALAR;
3632
3633 FOR_EACH_VEC_ELT (rhsc, j, c)
3634 process_constraint (new_constraint (includes, *c));
3635 }
3636
3637 /* Create a constraint ID = OP. */
3638
3639 static void
3640 make_constraint_to (unsigned id, tree op)
3641 {
3642 vec<ce_s> rhsc = vNULL;
3643 get_constraint_for_rhs (op, &rhsc);
3644 make_constraints_to (id, rhsc);
3645 rhsc.release ();
3646 }
3647
3648 /* Create a constraint ID = &FROM. */
3649
3650 static void
3651 make_constraint_from (varinfo_t vi, int from)
3652 {
3653 struct constraint_expr lhs, rhs;
3654
3655 lhs.var = vi->id;
3656 lhs.offset = 0;
3657 lhs.type = SCALAR;
3658
3659 rhs.var = from;
3660 rhs.offset = 0;
3661 rhs.type = ADDRESSOF;
3662 process_constraint (new_constraint (lhs, rhs));
3663 }
3664
3665 /* Create a constraint ID = FROM. */
3666
3667 static void
3668 make_copy_constraint (varinfo_t vi, int from)
3669 {
3670 struct constraint_expr lhs, rhs;
3671
3672 lhs.var = vi->id;
3673 lhs.offset = 0;
3674 lhs.type = SCALAR;
3675
3676 rhs.var = from;
3677 rhs.offset = 0;
3678 rhs.type = SCALAR;
3679 process_constraint (new_constraint (lhs, rhs));
3680 }
3681
3682 /* Make constraints necessary to make OP escape. */
3683
3684 static void
3685 make_escape_constraint (tree op)
3686 {
3687 make_constraint_to (escaped_id, op);
3688 }
3689
3690 /* Add constraints to that the solution of VI is transitively closed. */
3691
3692 static void
3693 make_transitive_closure_constraints (varinfo_t vi)
3694 {
3695 struct constraint_expr lhs, rhs;
3696
3697 /* VAR = *VAR; */
3698 lhs.type = SCALAR;
3699 lhs.var = vi->id;
3700 lhs.offset = 0;
3701 rhs.type = DEREF;
3702 rhs.var = vi->id;
3703 rhs.offset = 0;
3704 process_constraint (new_constraint (lhs, rhs));
3705
3706 /* VAR = VAR + UNKNOWN; */
3707 lhs.type = SCALAR;
3708 lhs.var = vi->id;
3709 lhs.offset = 0;
3710 rhs.type = SCALAR;
3711 rhs.var = vi->id;
3712 rhs.offset = UNKNOWN_OFFSET;
3713 process_constraint (new_constraint (lhs, rhs));
3714 }
3715
/* Temporary storage for fake var decls.  */
struct obstack fake_var_decl_obstack;

/* Build a fake VAR_DECL acting as referrer to a DECL_UID.  */

static tree
build_fake_var_decl (tree type)
{
  /* Allocate from the obstack rather than GC memory -- these decls
     only ever exist for the points-to machinery.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* NOTE(review): PT_UID of -1 presumably marks "no canonical
     points-to uid assigned" -- confirm against SET_DECL_PT_UID.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3733
3734 /* Create a new artificial heap variable with NAME.
3735 Return the created variable. */
3736
3737 static varinfo_t
3738 make_heapvar (const char *name)
3739 {
3740 varinfo_t vi;
3741 tree heapvar;
3742
3743 heapvar = build_fake_var_decl (ptr_type_node);
3744 DECL_EXTERNAL (heapvar) = 1;
3745
3746 vi = new_var_info (heapvar, name);
3747 vi->is_artificial_var = true;
3748 vi->is_heap_var = true;
3749 vi->is_unknown_size_var = true;
3750 vi->offset = 0;
3751 vi->fullsize = ~0;
3752 vi->size = ~0;
3753 vi->is_full_var = true;
3754 insert_vi_for_tree (heapvar, vi);
3755
3756 return vi;
3757 }
3758
3759 /* Create a new artificial heap variable with NAME and make a
3760 constraint from it to LHS. Set flags according to a tag used
3761 for tracking restrict pointers. */
3762
3763 static varinfo_t
3764 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3765 {
3766 varinfo_t vi = make_heapvar (name);
3767 vi->is_global_var = 1;
3768 vi->may_have_pointers = 1;
3769 make_constraint_from (lhs, vi->id);
3770 return vi;
3771 }
3772
3773 /* Create a new artificial heap variable with NAME and make a
3774 constraint from it to LHS. Set flags according to a tag used
3775 for tracking restrict pointers and make the artificial heap
3776 point to global memory. */
3777
3778 static varinfo_t
3779 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3780 {
3781 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3782 make_copy_constraint (vi, nonlocal_id);
3783 return vi;
3784 }
3785
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3794
3795 /* Get a constraint for the requested part of a function designator FI
3796 when operating in IPA mode. */
3797
3798 static struct constraint_expr
3799 get_function_part_constraint (varinfo_t fi, unsigned part)
3800 {
3801 struct constraint_expr c;
3802
3803 gcc_assert (in_ipa_mode);
3804
3805 if (fi->id == anything_id)
3806 {
3807 /* ??? We probably should have a ANYFN special variable. */
3808 c.var = anything_id;
3809 c.offset = 0;
3810 c.type = SCALAR;
3811 }
3812 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3813 {
3814 varinfo_t ai = first_vi_for_offset (fi, part);
3815 if (ai)
3816 c.var = ai->id;
3817 else
3818 c.var = anything_id;
3819 c.offset = 0;
3820 c.type = SCALAR;
3821 }
3822 else
3823 {
3824 c.var = fi->id;
3825 c.offset = part;
3826 c.type = DEREF;
3827 }
3828
3829 return c;
3830 }
3831
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS.  Push the constraint expressions for what the call may return
   onto *RESULTS.  */

static void
handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  varinfo_t uses = get_call_use_vi (stmt);
	  if (!(flags & EAF_DIRECT))
	    {
	      /* For indirect uses route the argument through a
		 transitively-closed temporary so everything reachable
		 from it counts as used.  */
	      varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	      make_constraint_to (tem->id, arg);
	      make_transitive_closure_constraints (tem);
	      make_copy_constraint (uses, tem->id);
	    }
	  else
	    make_constraint_to (uses->id, arg);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg");
	  make_constraint_to (tem->id, arg);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	/* Arguments without any escape information escape fully.  */
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      vec<ce_s> tmpc = vNULL;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
      tmpc.release ();
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
3933
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  */

static void
handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  vec<ce_s> lhsc = vNULL;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  /* NOTE(review): the incoming FLAGS parameter is dead -- it is
     unconditionally overwritten here before any use.  */
  flags = gimple_call_return_flags (stmt);
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      /* RHSC is passed by value, so re-creating it here does not
	 affect the caller's vector.  */
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP");
      /* We delay marking allocated storage global until we know if
	 it escapes.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);

  lhsc.release ();
}
4000
4001 /* For non-IPA mode, generate constraints necessary for a call of a
4002 const function that returns a pointer in the statement STMT. */
4003
4004 static void
4005 handle_const_call (gimple stmt, vec<ce_s> *results)
4006 {
4007 struct constraint_expr rhsc;
4008 unsigned int k;
4009
4010 /* Treat nested const functions the same as pure functions as far
4011 as the static chain is concerned. */
4012 if (gimple_call_chain (stmt))
4013 {
4014 varinfo_t uses = get_call_use_vi (stmt);
4015 make_transitive_closure_constraints (uses);
4016 make_constraint_to (uses->id, gimple_call_chain (stmt));
4017 rhsc.var = uses->id;
4018 rhsc.offset = 0;
4019 rhsc.type = SCALAR;
4020 results->safe_push (rhsc);
4021 }
4022
4023 /* May return arguments. */
4024 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4025 {
4026 tree arg = gimple_call_arg (stmt, k);
4027 vec<ce_s> argc = vNULL;
4028 unsigned i;
4029 struct constraint_expr *argp;
4030 get_constraint_for_rhs (arg, &argc);
4031 FOR_EACH_VEC_ELT (argc, i, argp)
4032 results->safe_push (*argp);
4033 argc.release ();
4034 }
4035
4036 /* May return addresses of globals. */
4037 rhsc.var = nonlocal_id;
4038 rhsc.offset = 0;
4039 rhsc.type = ADDRESSOF;
4040 results->safe_push (rhsc);
4041 }
4042
4043 /* For non-IPA mode, generate constraints necessary for a call to a
4044 pure function in statement STMT. */
4045
4046 static void
4047 handle_pure_call (gimple stmt, vec<ce_s> *results)
4048 {
4049 struct constraint_expr rhsc;
4050 unsigned i;
4051 varinfo_t uses = NULL;
4052
4053 /* Memory reached from pointer arguments is call-used. */
4054 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4055 {
4056 tree arg = gimple_call_arg (stmt, i);
4057 if (!uses)
4058 {
4059 uses = get_call_use_vi (stmt);
4060 make_transitive_closure_constraints (uses);
4061 }
4062 make_constraint_to (uses->id, arg);
4063 }
4064
4065 /* The static chain is used as well. */
4066 if (gimple_call_chain (stmt))
4067 {
4068 if (!uses)
4069 {
4070 uses = get_call_use_vi (stmt);
4071 make_transitive_closure_constraints (uses);
4072 }
4073 make_constraint_to (uses->id, gimple_call_chain (stmt));
4074 }
4075
4076 /* Pure functions may return call-used and nonlocal memory. */
4077 if (uses)
4078 {
4079 rhsc.var = uses->id;
4080 rhsc.offset = 0;
4081 rhsc.type = SCALAR;
4082 results->safe_push (rhsc);
4083 }
4084 rhsc.var = nonlocal_id;
4085 rhsc.offset = 0;
4086 rhsc.type = SCALAR;
4087 results->safe_push (rhsc);
4088 }
4089
4090
4091 /* Return the varinfo for the callee of CALL. */
4092
4093 static varinfo_t
4094 get_fi_for_callee (gimple call)
4095 {
4096 tree decl, fn = gimple_call_fn (call);
4097
4098 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4099 fn = OBJ_TYPE_REF_EXPR (fn);
4100
4101 /* If we can directly resolve the function being called, do so.
4102 Otherwise, it must be some sort of indirect expression that
4103 we should still be able to handle. */
4104 decl = gimple_call_addr_fndecl (fn);
4105 if (decl)
4106 return get_vi_for_tree (decl);
4107
4108 /* If the function is anything other than a SSA name pointer we have no
4109 clue and should be getting ANYFN (well, ANYTHING for now). */
4110 if (!fn || TREE_CODE (fn) != SSA_NAME)
4111 return get_varinfo (anything_id);
4112
4113 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4114 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4115 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4116 fn = SSA_NAME_VAR (fn);
4117
4118 return get_vi_for_tree (fn);
4119 }
4120
4121 /* Create constraints for the builtin call T. Return true if the call
4122 was handled, otherwise false. */
4123
4124 static bool
4125 find_func_aliases_for_builtin_call (gimple t)
4126 {
4127 tree fndecl = gimple_call_fndecl (t);
4128 vec<ce_s> lhsc = vNULL;
4129 vec<ce_s> rhsc = vNULL;
4130 varinfo_t fi;
4131
4132 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4133 /* ??? All builtins that are handled here need to be handled
4134 in the alias-oracle query functions explicitly! */
4135 switch (DECL_FUNCTION_CODE (fndecl))
4136 {
4137 /* All the following functions return a pointer to the same object
4138 as their first argument points to. The functions do not add
4139 to the ESCAPED solution. The functions make the first argument
4140 pointed to memory point to what the second argument pointed to
4141 memory points to. */
4142 case BUILT_IN_STRCPY:
4143 case BUILT_IN_STRNCPY:
4144 case BUILT_IN_BCOPY:
4145 case BUILT_IN_MEMCPY:
4146 case BUILT_IN_MEMMOVE:
4147 case BUILT_IN_MEMPCPY:
4148 case BUILT_IN_STPCPY:
4149 case BUILT_IN_STPNCPY:
4150 case BUILT_IN_STRCAT:
4151 case BUILT_IN_STRNCAT:
4152 case BUILT_IN_STRCPY_CHK:
4153 case BUILT_IN_STRNCPY_CHK:
4154 case BUILT_IN_MEMCPY_CHK:
4155 case BUILT_IN_MEMMOVE_CHK:
4156 case BUILT_IN_MEMPCPY_CHK:
4157 case BUILT_IN_STPCPY_CHK:
4158 case BUILT_IN_STPNCPY_CHK:
4159 case BUILT_IN_STRCAT_CHK:
4160 case BUILT_IN_STRNCAT_CHK:
4161 case BUILT_IN_TM_MEMCPY:
4162 case BUILT_IN_TM_MEMMOVE:
4163 {
4164 tree res = gimple_call_lhs (t);
4165 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4166 == BUILT_IN_BCOPY ? 1 : 0));
4167 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4168 == BUILT_IN_BCOPY ? 0 : 1));
4169 if (res != NULL_TREE)
4170 {
4171 get_constraint_for (res, &lhsc);
4172 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4173 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4174 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4175 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4176 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4177 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4178 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4179 else
4180 get_constraint_for (dest, &rhsc);
4181 process_all_all_constraints (lhsc, rhsc);
4182 lhsc.release ();
4183 rhsc.release ();
4184 }
4185 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4186 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4187 do_deref (&lhsc);
4188 do_deref (&rhsc);
4189 process_all_all_constraints (lhsc, rhsc);
4190 lhsc.release ();
4191 rhsc.release ();
4192 return true;
4193 }
4194 case BUILT_IN_MEMSET:
4195 case BUILT_IN_MEMSET_CHK:
4196 case BUILT_IN_TM_MEMSET:
4197 {
4198 tree res = gimple_call_lhs (t);
4199 tree dest = gimple_call_arg (t, 0);
4200 unsigned i;
4201 ce_s *lhsp;
4202 struct constraint_expr ac;
4203 if (res != NULL_TREE)
4204 {
4205 get_constraint_for (res, &lhsc);
4206 get_constraint_for (dest, &rhsc);
4207 process_all_all_constraints (lhsc, rhsc);
4208 lhsc.release ();
4209 rhsc.release ();
4210 }
4211 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4212 do_deref (&lhsc);
4213 if (flag_delete_null_pointer_checks
4214 && integer_zerop (gimple_call_arg (t, 1)))
4215 {
4216 ac.type = ADDRESSOF;
4217 ac.var = nothing_id;
4218 }
4219 else
4220 {
4221 ac.type = SCALAR;
4222 ac.var = integer_id;
4223 }
4224 ac.offset = 0;
4225 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4226 process_constraint (new_constraint (*lhsp, ac));
4227 lhsc.release ();
4228 return true;
4229 }
4230 case BUILT_IN_ASSUME_ALIGNED:
4231 {
4232 tree res = gimple_call_lhs (t);
4233 tree dest = gimple_call_arg (t, 0);
4234 if (res != NULL_TREE)
4235 {
4236 get_constraint_for (res, &lhsc);
4237 get_constraint_for (dest, &rhsc);
4238 process_all_all_constraints (lhsc, rhsc);
4239 lhsc.release ();
4240 rhsc.release ();
4241 }
4242 return true;
4243 }
4244 /* All the following functions do not return pointers, do not
4245 modify the points-to sets of memory reachable from their
4246 arguments and do not add to the ESCAPED solution. */
4247 case BUILT_IN_SINCOS:
4248 case BUILT_IN_SINCOSF:
4249 case BUILT_IN_SINCOSL:
4250 case BUILT_IN_FREXP:
4251 case BUILT_IN_FREXPF:
4252 case BUILT_IN_FREXPL:
4253 case BUILT_IN_GAMMA_R:
4254 case BUILT_IN_GAMMAF_R:
4255 case BUILT_IN_GAMMAL_R:
4256 case BUILT_IN_LGAMMA_R:
4257 case BUILT_IN_LGAMMAF_R:
4258 case BUILT_IN_LGAMMAL_R:
4259 case BUILT_IN_MODF:
4260 case BUILT_IN_MODFF:
4261 case BUILT_IN_MODFL:
4262 case BUILT_IN_REMQUO:
4263 case BUILT_IN_REMQUOF:
4264 case BUILT_IN_REMQUOL:
4265 case BUILT_IN_FREE:
4266 return true;
4267 case BUILT_IN_STRDUP:
4268 case BUILT_IN_STRNDUP:
4269 if (gimple_call_lhs (t))
4270 {
4271 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4272 vNULL, fndecl);
4273 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4274 NULL_TREE, &lhsc);
4275 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4276 NULL_TREE, &rhsc);
4277 do_deref (&lhsc);
4278 do_deref (&rhsc);
4279 process_all_all_constraints (lhsc, rhsc);
4280 lhsc.release ();
4281 rhsc.release ();
4282 return true;
4283 }
4284 break;
4285 /* String / character search functions return a pointer into the
4286 source string or NULL. */
4287 case BUILT_IN_INDEX:
4288 case BUILT_IN_STRCHR:
4289 case BUILT_IN_STRRCHR:
4290 case BUILT_IN_MEMCHR:
4291 case BUILT_IN_STRSTR:
4292 case BUILT_IN_STRPBRK:
4293 if (gimple_call_lhs (t))
4294 {
4295 tree src = gimple_call_arg (t, 0);
4296 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4297 constraint_expr nul;
4298 nul.var = nothing_id;
4299 nul.offset = 0;
4300 nul.type = ADDRESSOF;
4301 rhsc.safe_push (nul);
4302 get_constraint_for (gimple_call_lhs (t), &lhsc);
4303 process_all_all_constraints (lhsc, rhsc);
4304 lhsc.release();
4305 rhsc.release();
4306 }
4307 return true;
4308 /* Trampolines are special - they set up passing the static
4309 frame. */
4310 case BUILT_IN_INIT_TRAMPOLINE:
4311 {
4312 tree tramp = gimple_call_arg (t, 0);
4313 tree nfunc = gimple_call_arg (t, 1);
4314 tree frame = gimple_call_arg (t, 2);
4315 unsigned i;
4316 struct constraint_expr lhs, *rhsp;
4317 if (in_ipa_mode)
4318 {
4319 varinfo_t nfi = NULL;
4320 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4321 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4322 if (nfi)
4323 {
4324 lhs = get_function_part_constraint (nfi, fi_static_chain);
4325 get_constraint_for (frame, &rhsc);
4326 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4327 process_constraint (new_constraint (lhs, *rhsp));
4328 rhsc.release ();
4329
4330 /* Make the frame point to the function for
4331 the trampoline adjustment call. */
4332 get_constraint_for (tramp, &lhsc);
4333 do_deref (&lhsc);
4334 get_constraint_for (nfunc, &rhsc);
4335 process_all_all_constraints (lhsc, rhsc);
4336 rhsc.release ();
4337 lhsc.release ();
4338
4339 return true;
4340 }
4341 }
4342 /* Else fallthru to generic handling which will let
4343 the frame escape. */
4344 break;
4345 }
4346 case BUILT_IN_ADJUST_TRAMPOLINE:
4347 {
4348 tree tramp = gimple_call_arg (t, 0);
4349 tree res = gimple_call_lhs (t);
4350 if (in_ipa_mode && res)
4351 {
4352 get_constraint_for (res, &lhsc);
4353 get_constraint_for (tramp, &rhsc);
4354 do_deref (&rhsc);
4355 process_all_all_constraints (lhsc, rhsc);
4356 rhsc.release ();
4357 lhsc.release ();
4358 }
4359 return true;
4360 }
4361 CASE_BUILT_IN_TM_STORE (1):
4362 CASE_BUILT_IN_TM_STORE (2):
4363 CASE_BUILT_IN_TM_STORE (4):
4364 CASE_BUILT_IN_TM_STORE (8):
4365 CASE_BUILT_IN_TM_STORE (FLOAT):
4366 CASE_BUILT_IN_TM_STORE (DOUBLE):
4367 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4368 CASE_BUILT_IN_TM_STORE (M64):
4369 CASE_BUILT_IN_TM_STORE (M128):
4370 CASE_BUILT_IN_TM_STORE (M256):
4371 {
4372 tree addr = gimple_call_arg (t, 0);
4373 tree src = gimple_call_arg (t, 1);
4374
4375 get_constraint_for (addr, &lhsc);
4376 do_deref (&lhsc);
4377 get_constraint_for (src, &rhsc);
4378 process_all_all_constraints (lhsc, rhsc);
4379 lhsc.release ();
4380 rhsc.release ();
4381 return true;
4382 }
4383 CASE_BUILT_IN_TM_LOAD (1):
4384 CASE_BUILT_IN_TM_LOAD (2):
4385 CASE_BUILT_IN_TM_LOAD (4):
4386 CASE_BUILT_IN_TM_LOAD (8):
4387 CASE_BUILT_IN_TM_LOAD (FLOAT):
4388 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4389 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4390 CASE_BUILT_IN_TM_LOAD (M64):
4391 CASE_BUILT_IN_TM_LOAD (M128):
4392 CASE_BUILT_IN_TM_LOAD (M256):
4393 {
4394 tree dest = gimple_call_lhs (t);
4395 tree addr = gimple_call_arg (t, 0);
4396
4397 get_constraint_for (dest, &lhsc);
4398 get_constraint_for (addr, &rhsc);
4399 do_deref (&rhsc);
4400 process_all_all_constraints (lhsc, rhsc);
4401 lhsc.release ();
4402 rhsc.release ();
4403 return true;
4404 }
4405 /* Variadic argument handling needs to be handled in IPA
4406 mode as well. */
4407 case BUILT_IN_VA_START:
4408 {
4409 tree valist = gimple_call_arg (t, 0);
4410 struct constraint_expr rhs, *lhsp;
4411 unsigned i;
4412 get_constraint_for (valist, &lhsc);
4413 do_deref (&lhsc);
4414 /* The va_list gets access to pointers in variadic
4415 arguments. Which we know in the case of IPA analysis
4416 and otherwise are just all nonlocal variables. */
4417 if (in_ipa_mode)
4418 {
4419 fi = lookup_vi_for_tree (cfun->decl);
4420 rhs = get_function_part_constraint (fi, ~0);
4421 rhs.type = ADDRESSOF;
4422 }
4423 else
4424 {
4425 rhs.var = nonlocal_id;
4426 rhs.type = ADDRESSOF;
4427 rhs.offset = 0;
4428 }
4429 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4430 process_constraint (new_constraint (*lhsp, rhs));
4431 lhsc.release ();
4432 /* va_list is clobbered. */
4433 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4434 return true;
4435 }
4436 /* va_end doesn't have any effect that matters. */
4437 case BUILT_IN_VA_END:
4438 return true;
4439 /* Alternate return. Simply give up for now. */
4440 case BUILT_IN_RETURN:
4441 {
4442 fi = NULL;
4443 if (!in_ipa_mode
4444 || !(fi = get_vi_for_tree (cfun->decl)))
4445 make_constraint_from (get_varinfo (escaped_id), anything_id);
4446 else if (in_ipa_mode
4447 && fi != NULL)
4448 {
4449 struct constraint_expr lhs, rhs;
4450 lhs = get_function_part_constraint (fi, fi_result);
4451 rhs.var = anything_id;
4452 rhs.offset = 0;
4453 rhs.type = SCALAR;
4454 process_constraint (new_constraint (lhs, rhs));
4455 }
4456 return true;
4457 }
4458 /* printf-style functions may have hooks to set pointers to
4459 point to somewhere into the generated string. Leave them
4460 for a later excercise... */
4461 default:
4462 /* Fallthru to general call handling. */;
4463 }
4464
4465 return false;
4466 }
4467
/* Create constraints for the call T.

   Built-in functions are first given a chance to be handled specially
   by find_func_aliases_for_builtin_call.  Otherwise, in non-IPA mode
   (or for direct calls to functions we have no function info for) the
   call is summarized conservatively from its ECF_* flags; in IPA mode
   the actual arguments, return value and static chain are connected
   to the callee's function-info variable.  */

static void
find_func_aliases_for_call (gimple t)
{
  tree fndecl = gimple_call_fndecl (t);
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Let the builtin handler deal with known builtins; it returns true
     when it fully processed the call.  */
  if (fndecl != NULL_TREE
      && DECL_BUILT_IN (fndecl)
      && find_func_aliases_for_builtin_call (t))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fndecl && !fi->is_fn_info))
    {
      /* Non-IPA (or no callee info) path: model the call through its
	 side-effect flags only.  Note this RHSC shadows the outer one.  */
      vec<ce_s> rhsc = vNULL;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_const_call (t, &rhsc);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_pure_call (t, &rhsc);
      else
	handle_rhs_call (t, &rhsc);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
      rhsc.release ();
    }
  else
    {
      /* IPA path: wire arguments, result and static chain to the
	 callee's function-info parts.  */
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, j);

	  get_constraint_for_rhs (arg, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_parm_base + j);
	  /* Drain RHSC, emitting one constraint per element.  */
	  while (rhsc.length () != 0)
	    {
	      rhsp = &rhsc.last ();
	      process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.pop ();
	    }
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  /* When the result is returned by invisible reference the
	     callee's result slot holds a pointer to it; dereference.  */
	  if (fndecl
	      && DECL_RESULT (fndecl)
	      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	    {
	      vec<ce_s> tem = vNULL;
	      tem.safe_push (rhs);
	      do_deref (&tem);
	      rhs = tem[0];
	      tem.release ();
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	}

      /* If we pass the result decl by reference, honor that.  */
      if (lhsop
	  && fndecl
	  && DECL_RESULT (fndecl)
	  && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for_address_of (lhsop, &rhsc);
	  lhs = get_function_part_constraint (fi, fi_result);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
4583
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  It dispatches on the statement kind: PHIs,
   calls, assignments, returns and asms are each translated into
   inclusion constraints.  */

static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  struct constraint_expr *c;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      size_t i;
      unsigned int j;

      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (i = 0; i < gimple_phi_num_args (t); i++)
	{
	  tree strippedrhs = PHI_ARG_DEF (t, i);

	  STRIP_NOPS (strippedrhs);
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);

	  /* Emit one constraint per (lhs, rhs) pair, draining RHSC so
	     it is empty again for the next PHI argument.  */
	  FOR_EACH_VEC_ELT (lhsc, j, c)
	    {
	      struct constraint_expr *c2;
	      while (rhsc.length () > 0)
		{
		  c2 = &rhsc.last ();
		  process_constraint (new_constraint (*c, *c2));
		  rhsc.pop ();
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (t);

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* RHSOP is only the single operand for single-rhs assigns;
	 multi-operand rhs are handled via gimple_op below.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
	    /* If the operation produces a floating point result then
	       assume the value is not produced to transfer a pointer.  */
	    ;
	  else if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if ((CONVERT_EXPR_CODE_P (code)
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    /* Conversions that do not create a pointer from a
	       non-pointer, and plain copies, just forward the rhs.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	      tmp.release ();
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are merges.  */
	      vec<ce_s> tmp = vNULL;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_rhs (gimple_op (t, i), &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      tmp.release ();
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop)
	  && is_global_var (lhsop)
	  && (!in_ipa_mode
	      || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
	make_escape_constraint (rhsop);
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (t) != NULL_TREE)
    {
      fi = NULL;
      if (!in_ipa_mode
	  || !(fi = get_vi_for_tree (cfun->decl)))
	make_escape_constraint (gimple_return_retval (t));
      else if (in_ipa_mode
	       && fi != NULL)
	{
	  /* In IPA mode the returned value flows into this function's
	     result part instead of escaping.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (t);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      vec<ce_s> lhsc = vNULL;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	      lhsc.release ();
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree link = gimple_asm_input_op (t, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }

  rhsc.release ();
  lhsc.release ();
}
4805
4806
4807 /* Create a constraint adding to the clobber set of FI the memory
4808 pointed to by PTR. */
4809
4810 static void
4811 process_ipa_clobber (varinfo_t fi, tree ptr)
4812 {
4813 vec<ce_s> ptrc = vNULL;
4814 struct constraint_expr *c, lhs;
4815 unsigned i;
4816 get_constraint_for_rhs (ptr, &ptrc);
4817 lhs = get_function_part_constraint (fi, fi_clobbers);
4818 FOR_EACH_VEC_ELT (ptrc, i, c)
4819 process_constraint (new_constraint (lhs, *c));
4820 ptrc.release ();
4821 }
4822
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  */

static void
find_func_clobbers (gimple origt)
{
  gimple t = origt;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (cfun->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip component references to get at the base object.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Only non-local stores are of interest: decls not automatic to
	 this function, or indirect stores that are not provably
	 through a local automatic variable.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.release ();
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (t) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
      tree tem = rhs;
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Same non-local filter as for stores above.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, cfun->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}
    }

  if (is_gimple_call (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy has (src, dest) argument order, all others
		 (dest, src).  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.release ();
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      lhsc.release ();
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* Registers and constants cannot be pointed to.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.release ();
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (t);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee: assume everything is clobbered and used.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (gimple_call_fndecl (t)
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (t)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ???  This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ???  Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }

  rhsc.release ();
}
5100
5101
5102 /* Find the first varinfo in the same variable as START that overlaps with
5103 OFFSET. Return NULL if we can't find one. */
5104
5105 static varinfo_t
5106 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5107 {
5108 /* If the offset is outside of the variable, bail out. */
5109 if (offset >= start->fullsize)
5110 return NULL;
5111
5112 /* If we cannot reach offset from start, lookup the first field
5113 and start from there. */
5114 if (start->offset > offset)
5115 start = get_varinfo (start->head);
5116
5117 while (start)
5118 {
5119 /* We may not find a variable in the field list with the actual
5120 offset when when we have glommed a structure to a variable.
5121 In that case, however, offset should still be within the size
5122 of the variable. */
5123 if (offset >= start->offset
5124 && (offset - start->offset) < start->size)
5125 return start;
5126
5127 start = vi_next (start);
5128 }
5129
5130 return NULL;
5131 }
5132
5133 /* Find the first varinfo in the same variable as START that overlaps with
5134 OFFSET. If there is no such varinfo the varinfo directly preceding
5135 OFFSET is returned. */
5136
5137 static varinfo_t
5138 first_or_preceding_vi_for_offset (varinfo_t start,
5139 unsigned HOST_WIDE_INT offset)
5140 {
5141 /* If we cannot reach offset from start, lookup the first field
5142 and start from there. */
5143 if (start->offset > offset)
5144 start = get_varinfo (start->head);
5145
5146 /* We may not find a variable in the field list with the actual
5147 offset when when we have glommed a structure to a variable.
5148 In that case, however, offset should still be within the size
5149 of the variable.
5150 If we got beyond the offset we look for return the field
5151 directly preceding offset which may be the last field. */
5152 while (start->next
5153 && offset >= start->offset
5154 && !((offset - start->offset) < start->size))
5155 start = vi_next (start);
5156
5157 return start;
5158 }
5159
5160
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the field size is not known at compile time; SIZE is then
     set to -1 by push_fields_onto_fieldstack.  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers.  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a pointer type with TYPE_RESTRICT set.  */
  unsigned only_restrict_pointers : 1;
};
typedef struct fieldoff fieldoff_s;
5183
5184
5185 /* qsort comparison function for two fieldoff's PA and PB */
5186
5187 static int
5188 fieldoff_compare (const void *pa, const void *pb)
5189 {
5190 const fieldoff_s *foa = (const fieldoff_s *)pa;
5191 const fieldoff_s *fob = (const fieldoff_s *)pb;
5192 unsigned HOST_WIDE_INT foasize, fobsize;
5193
5194 if (foa->offset < fob->offset)
5195 return -1;
5196 else if (foa->offset > fob->offset)
5197 return 1;
5198
5199 foasize = foa->size;
5200 fobsize = fob->size;
5201 if (foasize < fobsize)
5202 return -1;
5203 else if (foasize > fobsize)
5204 return 1;
5205 return 0;
5206 }
5207
/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  /* fieldoff_compare orders by ascending offset, then ascending size.  */
  fieldstack.qsort (fieldoff_compare);
}
5214
/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars.  Unions
     and QUAL_UNIONs overlap, so only RECORD_TYPE qualifies.  */
  return TREE_CODE (t) == RECORD_TYPE;
}
5223
5224 /* Return true if V is a tree that we can have subvars for.
5225 Normally, this is any aggregate type. Also complex
5226 types which are not gimple registers can have subvars. */
5227
5228 static inline bool
5229 var_can_have_subvars (const_tree v)
5230 {
5231 /* Volatile variables should never have subvars. */
5232 if (TREE_THIS_VOLATILE (v))
5233 return false;
5234
5235 /* Non decls or memory tags can never have subvars. */
5236 if (!DECL_P (v))
5237 return false;
5238
5239 return type_can_have_subvars (TREE_TYPE (v));
5240 }
5241
5242 /* Return true if T is a type that does contain pointers. */
5243
5244 static bool
5245 type_must_have_pointers (tree type)
5246 {
5247 if (POINTER_TYPE_P (type))
5248 return true;
5249
5250 if (TREE_CODE (type) == ARRAY_TYPE)
5251 return type_must_have_pointers (TREE_TYPE (type));
5252
5253 /* A function or method can have pointers as arguments, so track
5254 those separately. */
5255 if (TREE_CODE (type) == FUNCTION_TYPE
5256 || TREE_CODE (type) == METHOD_TYPE)
5257 return true;
5258
5259 return false;
5260 }
5261
/* Return true if the type of field T is known to contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5267
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs are decomposed into fields.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields we cannot further decompose are pushed as a unit;
	   otherwise recurse into the field's record type.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.
	       The gap up to this field becomes a pointerless filler
	       entry at offset zero.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5368
5369 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5370 if it is a varargs function. */
5371
5372 static unsigned int
5373 count_num_arguments (tree decl, bool *is_varargs)
5374 {
5375 unsigned int num = 0;
5376 tree t;
5377
5378 /* Capture named arguments for K&R functions. They do not
5379 have a prototype and thus no TYPE_ARG_TYPES. */
5380 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5381 ++num;
5382
5383 /* Check if the function has variadic arguments. */
5384 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5385 if (TREE_VALUE (t) == void_type_node)
5386 break;
5387 if (!t)
5388 *is_varargs = true;
5389
5390 return num;
5391 }
5392
/* Create function information for DECL, using NAME, and return the
   varinfo we've created for the function.  */
5395
static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  The function info is laid out like a
     record: the function itself at offset 0, then clobbers, uses,
     static chain, result and one slot per parameter, addressed via
     the fi_* offset constants.  */

  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* Unknown number of parameters: use an unbounded size.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  /* PREV_VI tracks the tail of the sub-variable chain; each part
     below is appended in strictly increasing offset order (asserted
     before each link).  */
  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      asprintf (&tempname, "%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      /* Fall back to DECL itself when there is no result decl to
	 attach the varinfo to.  */
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
    }

  return vi;
}
5570
5571
5572 /* Return true if FIELDSTACK contains fields that overlap.
5573 FIELDSTACK is assumed to be sorted by offset. */
5574
5575 static bool
5576 check_for_overlaps (vec<fieldoff_s> fieldstack)
5577 {
5578 fieldoff_s *fo = NULL;
5579 unsigned int i;
5580 HOST_WIDE_INT lastoffset = -1;
5581
5582 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5583 {
5584 if (fo->offset == lastoffset)
5585 return true;
5586 lastoffset = fo->offset;
5587 }
5588 return false;
5589 }
5590
5591 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5592 This will also create any varinfo structures necessary for fields
5593 of DECL. */
5594
static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  vec<fieldoff_s> fieldstack = vNULL;
  fieldoff_s *fo;
  unsigned int i;

  /* Decls of unknown or non-constant size get one conservative
     varinfo covering everything.  */
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Field-sensitivity cannot be used if any field has unknown
	 size or a negative offset.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () <= 1
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained via ->next in
     offset order (FIELDSTACK is sorted by now).  */
  vi = new_var_info (decl, name);
  vi->fullsize = TREE_INT_CST_LOW (declsize);
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = "NULL";
      char *tempname;

      /* Only build the "decl.offset+size" sub-variable names when
	 dumping; they are for diagnostics only.  */
      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* Pre-allocate the varinfo for the next field so vi_next can
	 reach it on the next iteration.  */
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  fieldstack.release ();

  return vi;
}
5708
/* Wrapper around create_variable_info_for_1 that registers the
   varinfo for DECL and, for VAR_DECLs, adds the initial constraints
   for globals.  Returns the id of the created varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  Walk all sub-variables
     (fields) of the decl.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->symbol.definition)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5782
5783 /* Print out the points-to solution for VAR to FILE. */
5784
5785 static void
5786 dump_solution_for_var (FILE *file, unsigned int var)
5787 {
5788 varinfo_t vi = get_varinfo (var);
5789 unsigned int i;
5790 bitmap_iterator bi;
5791
5792 /* Dump the solution for unified vars anyway, this avoids difficulties
5793 in scanning dumps in the testsuite. */
5794 fprintf (file, "%s = { ", vi->name);
5795 vi = get_varinfo (find (var));
5796 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5797 fprintf (file, "%s ", get_varinfo (i)->name);
5798 fprintf (file, "}");
5799
5800 /* But note when the variable was unified. */
5801 if (vi->id != var)
5802 fprintf (file, " same as %s", vi->name);
5803
5804 fprintf (file, "\n");
5805 }
5806
5807 /* Print the points-to solution for VAR to stdout. */
5808
DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_solution_for_var (stdout, var);
}
5814
5815 /* Create varinfo structures for all of the variables in the
5816 function for intraprocedural mode. */
5817
static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
         reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  /* The fake decl stands for the object T is guaranteed (by
	     restrict) to point to exclusively.  */
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* Constrain the parameter to point to the fake object:
	     P = &PARM_NOALIAS.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  /* The fake object's own pointer fields may point anywhere
	     nonlocal (or are global restrict tags themselves).  */
	  for (; vi; vi = vi_next (vi))
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  /* Ordinary argument: every sub-variable that may contain a
	     pointer is initialized from NONLOCAL.  */
	  for (; p; p = vi_next (p))
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
5896
5897 /* Structure used to put solution bitmaps in a hashtable so they can
5898 be shared among variables with the same points-to set. */
5899
typedef struct shared_bitmap_info
{
  bitmap pt_vars;		/* The shared points-to variable set.  */
  hashval_t hashcode;		/* Cached bitmap_hash of PT_VARS.  */
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5906
5907 /* Shared_bitmap hashtable helpers. */
5908
/* Hash-table traits: entries are freed with free (), hashed via the
   cached hashcode and compared by bitmap equality.  */
struct shared_bitmap_hasher : typed_free_remove <shared_bitmap_info>
{
  typedef shared_bitmap_info value_type;
  typedef shared_bitmap_info compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
5916
5917 /* Hash function for a shared_bitmap_info_t */
5918
inline hashval_t
shared_bitmap_hasher::hash (const value_type *bi)
{
  /* The hash was precomputed when the entry was created.  */
  return bi->hashcode;
}
5924
5925 /* Equality function for two shared_bitmap_info_t's. */
5926
inline bool
shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)
{
  /* Compare the actual bitmaps, not the cached hashes.  */
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
5932
/* Shared_bitmap hashtable.  Maps a points-to bitmap to its canonical
   shared copy so identical solutions use one bitmap.  */

static hash_table <shared_bitmap_hasher> shared_bitmap_table;
5936
5937 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5938 existing instance if there is one, NULL otherwise. */
5939
5940 static bitmap
5941 shared_bitmap_lookup (bitmap pt_vars)
5942 {
5943 shared_bitmap_info **slot;
5944 struct shared_bitmap_info sbi;
5945
5946 sbi.pt_vars = pt_vars;
5947 sbi.hashcode = bitmap_hash (pt_vars);
5948
5949 slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
5950 NO_INSERT);
5951 if (!slot)
5952 return NULL;
5953 else
5954 return (*slot)->pt_vars;
5955 }
5956
5957
5958 /* Add a bitmap to the shared bitmap hashtable. */
5959
5960 static void
5961 shared_bitmap_add (bitmap pt_vars)
5962 {
5963 shared_bitmap_info **slot;
5964 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5965
5966 sbi->pt_vars = pt_vars;
5967 sbi->hashcode = bitmap_hash (pt_vars);
5968
5969 slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
5970 gcc_assert (!*slot);
5971 *slot = sbi;
5972 }
5973
5974
5975 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5976
static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  /* Record in PT whether any member of the set is global.  */
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
6010
6011
/* Compute and return the points-to solution for the variable ORIG_VI.  */
6013
static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  /* Cache the (zero-initialized) solution before filling it in.  */
  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First time we see this set: make it the shared copy.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* Reuse the existing shared bitmap and discard ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6092
6093 /* Given a pointer variable P, fill in its points-to set. */
6094
6095 static void
6096 find_what_p_points_to (tree p)
6097 {
6098 struct ptr_info_def *pi;
6099 tree lookup_p = p;
6100 varinfo_t vi;
6101
6102 /* For parameters, get at the points-to set for the actual parm
6103 decl. */
6104 if (TREE_CODE (p) == SSA_NAME
6105 && SSA_NAME_IS_DEFAULT_DEF (p)
6106 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6107 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6108 lookup_p = SSA_NAME_VAR (p);
6109
6110 vi = lookup_vi_for_tree (lookup_p);
6111 if (!vi)
6112 return;
6113
6114 pi = get_ptr_info (p);
6115 pi->pt = find_what_var_points_to (vi);
6116 }
6117
6118
6119 /* Query statistics for points-to solutions. */
6120
static struct {
  /* Number of queries pt_solution_includes answered "may alias" /
     "no alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Likewise for pt_solutions_intersect.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6127
6128 void
6129 dump_pta_stats (FILE *s)
6130 {
6131 fprintf (s, "\nPTA query stats:\n");
6132 fprintf (s, " pt_solution_includes: "
6133 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6134 HOST_WIDE_INT_PRINT_DEC" queries\n",
6135 pta_stats.pt_solution_includes_no_alias,
6136 pta_stats.pt_solution_includes_no_alias
6137 + pta_stats.pt_solution_includes_may_alias);
6138 fprintf (s, " pt_solutions_intersect: "
6139 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6140 HOST_WIDE_INT_PRINT_DEC" queries\n",
6141 pta_stats.pt_solutions_intersect_no_alias,
6142 pta_stats.pt_solutions_intersect_no_alias
6143 + pta_stats.pt_solutions_intersect_may_alias);
6144 }
6145
6146
6147 /* Reset the points-to solution *PT to a conservative default
6148 (point to anything). */
6149
void
pt_solution_reset (struct pt_solution *pt)
{
  /* Clear all fields, then make only the "anything" flag set, the
     most conservative answer.  */
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
6156
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that set
   contains global variables.  */
6161
6162 void
6163 pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
6164 {
6165 memset (pt, 0, sizeof (struct pt_solution));
6166 pt->vars = vars;
6167 pt->vars_contains_global = vars_contains_global;
6168 }
6169
6170 /* Set the points-to solution *PT to point only to the variable VAR. */
6171
6172 void
6173 pt_solution_set_var (struct pt_solution *pt, tree var)
6174 {
6175 memset (pt, 0, sizeof (struct pt_solution));
6176 pt->vars = BITMAP_GGC_ALLOC ();
6177 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6178 pt->vars_contains_global = is_global_var (var);
6179 }
6180
6181 /* Computes the union of the points-to solutions *DEST and *SRC and
6182 stores the result in *DEST. This changes the points-to bitmap
6183 of *DEST and thus may not be used if that might be shared.
6184 The points-to bitmap of *SRC and *DEST will not be shared after
6185 this function if they were not before. */
6186
static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  /* "Anything" subsumes everything else, so reset and stop early.  */
  dest->anything |= src->anything;
  if (dest->anything)
    {
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  if (!src->vars)
    return;

  /* Union the variable bitmaps; allocate DEST's lazily.  */
  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
6209
6210 /* Return true if the points-to solution *PT is empty. */
6211
bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED set, check if that is
     empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6235
6236 /* Return true if the points-to solution *PT only point to a single var, and
6237 return the var uid in *UID. */
6238
6239 bool
6240 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6241 {
6242 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6243 || pt->null || pt->vars == NULL
6244 || !bitmap_single_bit_set_p (pt->vars))
6245 return false;
6246
6247 *uid = bitmap_first_set_bit (pt->vars);
6248 return true;
6249 }
6250
6251 /* Return true if the points-to solution *PT includes global memory. */
6252
bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  /* Defer to the escaped solutions when they are part of PT.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ??? This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
6275
6276 /* Return true if the points-to solution *PT includes the variable
6277 declaration DECL. */
6278
static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED set, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6305
6306 bool
6307 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6308 {
6309 bool res = pt_solution_includes_1 (pt, decl);
6310 if (res)
6311 ++pta_stats.pt_solution_includes_may_alias;
6312 else
6313 ++pta_stats.pt_solution_includes_no_alias;
6314 return res;
6315 }
6316
6317 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6318 intersection. */
6319
static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6377
6378 bool
6379 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6380 {
6381 bool res = pt_solutions_intersect_1 (pt1, pt2);
6382 if (res)
6383 ++pta_stats.pt_solutions_intersect_may_alias;
6384 else
6385 ++pta_stats.pt_solutions_intersect_no_alias;
6386 return res;
6387 }
6388
6389
6390 /* Dump points-to information to OUTFILE. */
6391
6392 static void
6393 dump_sa_points_to_info (FILE *outfile)
6394 {
6395 unsigned int i;
6396
6397 fprintf (outfile, "\nPoints-to sets\n\n");
6398
6399 if (dump_flags & TDF_STATS)
6400 {
6401 fprintf (outfile, "Stats:\n");
6402 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6403 fprintf (outfile, "Non-pointer vars: %d\n",
6404 stats.nonpointer_vars);
6405 fprintf (outfile, "Statically unified vars: %d\n",
6406 stats.unified_vars_static);
6407 fprintf (outfile, "Dynamically unified vars: %d\n",
6408 stats.unified_vars_dynamic);
6409 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6410 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6411 fprintf (outfile, "Number of implicit edges: %d\n",
6412 stats.num_implicit_edges);
6413 }
6414
6415 for (i = 1; i < varmap.length (); i++)
6416 {
6417 varinfo_t vi = get_varinfo (i);
6418 if (!vi->may_have_pointers)
6419 continue;
6420 dump_solution_for_var (outfile, i);
6421 }
6422 }
6423
6424
6425 /* Debug points-to information to stderr.  Convenience wrapper around
   dump_sa_points_to_info, intended to be called from the debugger.  */
6426
6427 DEBUG_FUNCTION void
6428 debug_sa_points_to_info (void)
6429 {
6430 dump_sa_points_to_info (stderr);
6431 }
6432
6433
6434 /* Initialize the always-existing constraint variables for NULL,
6435 ANYTHING, READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER,
     together with the base constraints between them.  The creation
     order below must match the predefined *_id constants, which the
     gcc_asserts after each new_var_info call verify.  */
6436
6437 static void
6438 init_base_vars (void)
6439 {
6440 struct constraint_expr lhs, rhs;
6441 varinfo_t var_anything;
6442 varinfo_t var_nothing;
6443 varinfo_t var_readonly;
6444 varinfo_t var_escaped;
6445 varinfo_t var_nonlocal;
6446 varinfo_t var_storedanything;
6447 varinfo_t var_integer;
6448
6449 /* Variable ID zero is reserved and should be NULL. */
6450 varmap.safe_push (NULL);
6451
6452 /* Create the NULL variable, used to represent that a variable points
6453 to NULL. */
6454 var_nothing = new_var_info (NULL_TREE, "NULL");
6455 gcc_assert (var_nothing->id == nothing_id);
6456 var_nothing->is_artificial_var = 1;
6457 var_nothing->offset = 0;
6458 var_nothing->size = ~0;
6459 var_nothing->fullsize = ~0;
6460 var_nothing->is_special_var = 1;
6461 var_nothing->may_have_pointers = 0;
6462 var_nothing->is_global_var = 0;
6463
6464 /* Create the ANYTHING variable, used to represent that a variable
6465 points to some unknown piece of memory. */
6466 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6467 gcc_assert (var_anything->id == anything_id);
6468 var_anything->is_artificial_var = 1;
6469 var_anything->size = ~0;
6470 var_anything->offset = 0;
6471 var_anything->fullsize = ~0;
6472 var_anything->is_special_var = 1;
6473
6474 /* Anything points to anything. This makes deref constraints just
6475 work in the presence of linked list and other p = *p type loops,
6476 by saying that *ANYTHING = ANYTHING. */
6477 lhs.type = SCALAR;
6478 lhs.var = anything_id;
6479 lhs.offset = 0;
6480 rhs.type = ADDRESSOF;
6481 rhs.var = anything_id;
6482 rhs.offset = 0;
6483
6484 /* This specifically does not use process_constraint because
6485 process_constraint ignores all anything = anything constraints, since all
6486 but this one are redundant. */
6487 constraints.safe_push (new_constraint (lhs, rhs));
6488
6489 /* Create the READONLY variable, used to represent that a variable
6490 points to readonly memory. */
6491 var_readonly = new_var_info (NULL_TREE, "READONLY");
6492 gcc_assert (var_readonly->id == readonly_id);
6493 var_readonly->is_artificial_var = 1;
6494 var_readonly->offset = 0;
6495 var_readonly->size = ~0;
6496 var_readonly->fullsize = ~0;
6497 var_readonly->is_special_var = 1;
6498
6499 /* readonly memory points to anything, in order to make deref
6500 easier. In reality, it points to anything the particular
6501 readonly variable can point to, but we don't track this
6502 separately. */
6503 lhs.type = SCALAR;
6504 lhs.var = readonly_id;
6505 lhs.offset = 0;
6506 rhs.type = ADDRESSOF;
6507 rhs.var = readonly_id; /* FIXME */
6508 rhs.offset = 0;
6509 process_constraint (new_constraint (lhs, rhs));
6510
6511 /* Create the ESCAPED variable, used to represent the set of escaped
6512 memory. */
6513 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6514 gcc_assert (var_escaped->id == escaped_id);
6515 var_escaped->is_artificial_var = 1;
6516 var_escaped->offset = 0;
6517 var_escaped->size = ~0;
6518 var_escaped->fullsize = ~0;
     /* NOTE(review): unlike most artificial vars ESCAPED is not marked
	special — presumably so it participates fully in the solver;
	STOREDANYTHING below is handled the same way.  */
6519 var_escaped->is_special_var = 0;
6520
6521 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6522 memory. */
6523 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6524 gcc_assert (var_nonlocal->id == nonlocal_id);
6525 var_nonlocal->is_artificial_var = 1;
6526 var_nonlocal->offset = 0;
6527 var_nonlocal->size = ~0;
6528 var_nonlocal->fullsize = ~0;
6529 var_nonlocal->is_special_var = 1;
6530
6531 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6532 lhs.type = SCALAR;
6533 lhs.var = escaped_id;
6534 lhs.offset = 0;
6535 rhs.type = DEREF;
6536 rhs.var = escaped_id;
6537 rhs.offset = 0;
6538 process_constraint (new_constraint (lhs, rhs));
6539
6540 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6541 whole variable escapes. */
6542 lhs.type = SCALAR;
6543 lhs.var = escaped_id;
6544 lhs.offset = 0;
6545 rhs.type = SCALAR;
6546 rhs.var = escaped_id;
6547 rhs.offset = UNKNOWN_OFFSET;
6548 process_constraint (new_constraint (lhs, rhs));
6549
6550 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6551 everything pointed to by escaped points to what global memory can
6552 point to. */
6553 lhs.type = DEREF;
6554 lhs.var = escaped_id;
6555 lhs.offset = 0;
6556 rhs.type = SCALAR;
6557 rhs.var = nonlocal_id;
6558 rhs.offset = 0;
6559 process_constraint (new_constraint (lhs, rhs));
6560
6561 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6562 global memory may point to global memory and escaped memory. */
6563 lhs.type = SCALAR;
6564 lhs.var = nonlocal_id;
6565 lhs.offset = 0;
6566 rhs.type = ADDRESSOF;
6567 rhs.var = nonlocal_id;
6568 rhs.offset = 0;
6569 process_constraint (new_constraint (lhs, rhs));
6570 rhs.type = ADDRESSOF;
6571 rhs.var = escaped_id;
6572 rhs.offset = 0;
6573 process_constraint (new_constraint (lhs, rhs));
6574
6575 /* Create the STOREDANYTHING variable, used to represent the set of
6576 variables stored to *ANYTHING. */
6577 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6578 gcc_assert (var_storedanything->id == storedanything_id);
6579 var_storedanything->is_artificial_var = 1;
6580 var_storedanything->offset = 0;
6581 var_storedanything->size = ~0;
6582 var_storedanything->fullsize = ~0;
6583 var_storedanything->is_special_var = 0;
6584
6585 /* Create the INTEGER variable, used to represent that a variable points
6586 to what an INTEGER "points to". */
6587 var_integer = new_var_info (NULL_TREE, "INTEGER");
6588 gcc_assert (var_integer->id == integer_id);
6589 var_integer->is_artificial_var = 1;
6590 var_integer->size = ~0;
6591 var_integer->fullsize = ~0;
6592 var_integer->offset = 0;
6593 var_integer->is_special_var = 1;
6594
6595 /* INTEGER = ANYTHING, because we don't know where a dereference of
6596 a random integer will point to. */
6597 lhs.type = SCALAR;
6598 lhs.var = integer_id;
6599 lhs.offset = 0;
6600 rhs.type = ADDRESSOF;
6601 rhs.var = anything_id;
6602 rhs.offset = 0;
6603 process_constraint (new_constraint (lhs, rhs));
6604 }
6605
6606 /* Initialize things necessary to perform PTA: the bitmap obstacks,
   allocation pools, constraint and variable-info vectors, lookup maps
   and the always-existing base variables.  */
6607
6608 static void
6609 init_alias_vars (void)
6610 {
     /* Field-sensitivity is effectively disabled when at most one
	field per variable is allowed.  */
6611 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6612
6613 bitmap_obstack_initialize (&pta_obstack);
6614 bitmap_obstack_initialize (&oldpta_obstack);
6615 bitmap_obstack_initialize (&predbitmap_obstack);
6616
6617 constraint_pool = create_alloc_pool ("Constraint pool",
6618 sizeof (struct constraint), 30);
6619 variable_info_pool = create_alloc_pool ("Variable info pool",
6620 sizeof (struct variable_info), 30);
6621 constraints.create (8);
6622 varmap.create (8);
6623 vi_for_tree = pointer_map_create ();
6624 call_stmt_vars = pointer_map_create ();
6625
6626 memset (&stats, 0, sizeof (stats));
6627 shared_bitmap_table.create (511);
     /* Must come after the vectors and pools above are set up, as it
	pushes onto varmap and allocates varinfos.  */
6628 init_base_vars ();
6629
6630 gcc_obstack_init (&fake_var_decl_obstack);
6631
6632 final_solutions = pointer_map_create ();
6633 gcc_obstack_init (&final_solutions_obstack);
6634 }
6635
6636 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6637 predecessor edges. */
6638
6639 static void
6640 remove_preds_and_fake_succs (constraint_graph_t graph)
6641 {
6642 unsigned int i;
6643
6644 /* Clear the implicit ref and address nodes from the successor
6645 lists. */
6646 for (i = 1; i < FIRST_REF_NODE; i++)
6647 {
6648 if (graph->succs[i])
6649 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6650 FIRST_REF_NODE * 2);
6651 }
6652
6653 /* Free the successor list for the non-ref nodes. */
6654 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6655 {
6656 if (graph->succs[i])
6657 BITMAP_FREE (graph->succs[i]);
6658 }
6659
6660 /* Now reallocate the size of the successor list as, and blow away
6661 the predecessor bitmaps. */
6662 graph->size = varmap.length ();
6663 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6664
6665 free (graph->implicit_preds);
6666 graph->implicit_preds = NULL;
6667 free (graph->preds);
6668 graph->preds = NULL;
6669 bitmap_obstack_release (&predbitmap_obstack);
6670 }
6671
6672 /* Solve the constraint set.  Builds the predecessor graph, performs
   the offline variable substitution and unification optimizations,
   then runs the actual solver on the successor graph.  The order of
   the phases below is significant.  */
6673
6674 static void
6675 solve_constraints (void)
6676 {
6677 struct scc_info *si;
6678
6679 if (dump_file)
6680 fprintf (dump_file,
6681 "\nCollapsing static cycles and doing variable "
6682 "substitution\n");
6683
     /* Twice the number of variables: the upper half holds the fake
	REF nodes (see FIRST_REF_NODE uses in remove_preds_and_fake_succs).  */
6684 init_graph (varmap.length () * 2);
6685
6686 if (dump_file)
6687 fprintf (dump_file, "Building predecessor graph\n");
6688 build_pred_graph ();
6689
6690 if (dump_file)
6691 fprintf (dump_file, "Detecting pointer and location "
6692 "equivalences\n");
6693 si = perform_var_substitution (graph);
6694
6695 if (dump_file)
6696 fprintf (dump_file, "Rewriting constraints and unifying "
6697 "variables\n");
6698 rewrite_constraints (graph, si);
6699
6700 build_succ_graph ();
6701
6702 free_var_substitution_info (si);
6703
6704 /* Attach complex constraints to graph nodes. */
6705 move_complex_constraints (graph);
6706
6707 if (dump_file)
6708 fprintf (dump_file, "Uniting pointer but not location equivalent "
6709 "variables\n");
6710 unite_pointer_equivalences (graph);
6711
6712 if (dump_file)
6713 fprintf (dump_file, "Finding indirect cycles\n");
6714 find_indirect_cycles (graph);
6715
6716 /* Implicit nodes and predecessors are no longer necessary at this
6717 point. */
6718 remove_preds_and_fake_succs (graph);
6719
6720 if (dump_file && (dump_flags & TDF_GRAPH))
6721 {
6722 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6723 "in dot format:\n");
6724 dump_constraint_graph (dump_file);
6725 fprintf (dump_file, "\n\n");
6726 }
6727
6728 if (dump_file)
6729 fprintf (dump_file, "Solving graph\n");
6730
6731 solve_graph (graph);
6732
6733 if (dump_file && (dump_flags & TDF_GRAPH))
6734 {
6735 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6736 "in dot format:\n");
6737 dump_constraint_graph (dump_file);
6738 fprintf (dump_file, "\n\n");
6739 }
6740
6741 if (dump_file)
6742 dump_sa_points_to_info (dump_file);
6743 }
6744
6745 /* Create points-to sets for the current function. See the comments
6746 at the start of the file for an algorithmic overview.  Generates
   constraints from each statement, solves them, then distributes the
   solutions to SSA names and to the per-call use/clobber sets.  */
6747
6748 static void
6749 compute_points_to_sets (void)
6750 {
6751 basic_block bb;
6752 unsigned i;
6753 varinfo_t vi;
6754
6755 timevar_push (TV_TREE_PTA);
6756
6757 init_alias_vars ();
6758
6759 intra_create_variable_infos ();
6760
6761 /* Now walk all statements and build the constraint set. */
6762 FOR_EACH_BB (bb)
6763 {
6764 gimple_stmt_iterator gsi;
6765
     /* Virtual PHIs carry no pointer values, so only real PHIs
	generate constraints.  */
6766 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6767 {
6768 gimple phi = gsi_stmt (gsi);
6769
6770 if (! virtual_operand_p (gimple_phi_result (phi)))
6771 find_func_aliases (phi);
6772 }
6773
6774 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6775 {
6776 gimple stmt = gsi_stmt (gsi);
6777
6778 find_func_aliases (stmt);
6779 }
6780 }
6781
6782 if (dump_file)
6783 {
6784 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6785 dump_constraints (dump_file, 0);
6786 }
6787
6788 /* From the constraints compute the points-to sets. */
6789 solve_constraints ();
6790
6791 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6792 cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
6793
6794 /* Make sure the ESCAPED solution (which is used as placeholder in
6795 other solutions) does not reference itself. This simplifies
6796 points-to solution queries. */
6797 cfun->gimple_df->escaped.escaped = 0;
6798
6799 /* Mark escaped HEAP variables as global.  Note the chained
   assignment: DECL_EXTERNAL and is_global_var both receive the
   same answer from the escape query.  */
6800 FOR_EACH_VEC_ELT (varmap, i, vi)
6801 if (vi
6802 && vi->is_heap_var
6803 && !vi->is_global_var)
6804 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6805 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6806
6807 /* Compute the points-to sets for pointer SSA_NAMEs. */
6808 for (i = 0; i < num_ssa_names; ++i)
6809 {
6810 tree ptr = ssa_name (i);
6811 if (ptr
6812 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6813 find_what_p_points_to (ptr);
6814 }
6815
6816 /* Compute the call-used/clobbered sets. */
6817 FOR_EACH_BB (bb)
6818 {
6819 gimple_stmt_iterator gsi;
6820
6821 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6822 {
6823 gimple stmt = gsi_stmt (gsi);
6824 struct pt_solution *pt;
6825 if (!is_gimple_call (stmt))
6826 continue;
6827
6828 pt = gimple_call_use_set (stmt);
6829 if (gimple_call_flags (stmt) & ECF_CONST)
6830 memset (pt, 0, sizeof (struct pt_solution));
6831 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6832 {
6833 *pt = find_what_var_points_to (vi);
6834 /* Escaped (and thus nonlocal) variables are always
6835 implicitly used by calls. */
6836 /* ??? ESCAPED can be empty even though NONLOCAL
6837 always escaped. */
6838 pt->nonlocal = 1;
6839 pt->escaped = 1;
6840 }
6841 else
6842 {
6843 /* If there is nothing special about this call then
6844 we have made everything that is used also escape. */
6845 *pt = cfun->gimple_df->escaped;
6846 pt->nonlocal = 1;
6847 }
6848
6849 pt = gimple_call_clobber_set (stmt);
6850 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6851 memset (pt, 0, sizeof (struct pt_solution));
6852 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6853 {
6854 *pt = find_what_var_points_to (vi);
6855 /* Escaped (and thus nonlocal) variables are always
6856 implicitly clobbered by calls. */
6857 /* ??? ESCAPED can be empty even though NONLOCAL
6858 always escaped. */
6859 pt->nonlocal = 1;
6860 pt->escaped = 1;
6861 }
6862 else
6863 {
6864 /* If there is nothing special about this call then
6865 we have made everything that is used also escape. */
6866 *pt = cfun->gimple_df->escaped;
6867 pt->nonlocal = 1;
6868 }
6869 }
6870 }
6871
6872 timevar_pop (TV_TREE_PTA);
6873 }
6874
6875
6876 /* Delete created points-to sets and release all memory used by the
   constraint solver: lookup maps, obstacks, the constraint graph and
   the allocation pools.  */
6877
6878 static void
6879 delete_points_to_sets (void)
6880 {
6881 unsigned int i;
6882
6883 shared_bitmap_table.dispose ();
6884 if (dump_file && (dump_flags & TDF_STATS))
6885 fprintf (dump_file, "Points to sets created:%d\n",
6886 stats.points_to_sets_created);
6887
6888 pointer_map_destroy (vi_for_tree);
6889 pointer_map_destroy (call_stmt_vars);
6890 bitmap_obstack_release (&pta_obstack);
6891 constraints.release ();
6892
     /* Free the per-node complex constraint vectors before the graph
	itself.  */
6893 for (i = 0; i < graph->size; i++)
6894 graph->complex[i].release ();
6895 free (graph->complex);
6896
6897 free (graph->rep);
6898 free (graph->succs);
6899 free (graph->pe);
6900 free (graph->pe_rep);
6901 free (graph->indirect_cycles);
6902 free (graph);
6903
6904 varmap.release ();
6905 free_alloc_pool (variable_info_pool);
6906 free_alloc_pool (constraint_pool);
6907
6908 obstack_free (&fake_var_decl_obstack, NULL);
6909
6910 pointer_map_destroy (final_solutions);
6911 obstack_free (&final_solutions_obstack, NULL);
6912 }
6913
6914
6915 /* Compute points-to information for every SSA_NAME pointer in the
6916 current function and compute the transitive closure of escaped
6917 variables to re-initialize the call-clobber states of local variables.
   Returns 0 (no further TODO work).  */
6918
6919 unsigned int
6920 compute_may_aliases (void)
6921 {
     /* When whole-program (IPA) points-to results were already computed
	for this function, do not overwrite them with the weaker local
	analysis.  */
6922 if (cfun->gimple_df->ipa_pta)
6923 {
6924 if (dump_file)
6925 {
6926 fprintf (dump_file, "\nNot re-computing points-to information "
6927 "because IPA points-to information is available.\n\n");
6928
6929 /* But still dump what we have computed so far.  */
6930 dump_alias_info (dump_file);
6931 }
6932
6933 return 0;
6934 }
6935
6936 /* For each pointer P_i, determine the sets of variables that P_i may
6937 point-to. Compute the reachability set of escaped and call-used
6938 variables. */
6939 compute_points_to_sets ();
6940
6941 /* Debugging dumps. */
6942 if (dump_file)
6943 dump_alias_info (dump_file);
6944
6945 /* Deallocate memory used by aliasing data structures and the internal
6946 points-to solution. */
6947 delete_points_to_sets ();
6948
6949 gcc_assert (!need_ssa_update_p (cfun));
6950
6951 return 0;
6952 }
6953
6954 static bool
6955 gate_tree_pta (void)
6956 {
6957 return flag_tree_pta;
6958 }
6959
6960 /* A dummy pass to cause points-to information to be computed via
6961 TODO_rebuild_alias.  The pass has no execute function of its own;
   all the work happens when the TODO flag is processed.  */
6962
6963 struct gimple_opt_pass pass_build_alias =
6964 {
6965 {
6966 GIMPLE_PASS,
6967 "alias", /* name */
6968 OPTGROUP_NONE, /* optinfo_flags */
6969 gate_tree_pta, /* gate */
6970 NULL, /* execute */
6971 NULL, /* sub */
6972 NULL, /* next */
6973 0, /* static_pass_number */
6974 TV_NONE, /* tv_id */
6975 PROP_cfg | PROP_ssa, /* properties_required */
6976 0, /* properties_provided */
6977 0, /* properties_destroyed */
6978 0, /* todo_flags_start */
6979 TODO_rebuild_alias /* todo_flags_finish */
6980 }
6981 };
6982
6983 /* A dummy pass to cause points-to information to be computed via
6984 TODO_rebuild_alias.  Identical in structure to pass_build_alias;
   only the pass name differs.  */
6985
6986 struct gimple_opt_pass pass_build_ealias =
6987 {
6988 {
6989 GIMPLE_PASS,
6990 "ealias", /* name */
6991 OPTGROUP_NONE, /* optinfo_flags */
6992 gate_tree_pta, /* gate */
6993 NULL, /* execute */
6994 NULL, /* sub */
6995 NULL, /* next */
6996 0, /* static_pass_number */
6997 TV_NONE, /* tv_id */
6998 PROP_cfg | PROP_ssa, /* properties_required */
6999 0, /* properties_provided */
7000 0, /* properties_destroyed */
7001 0, /* todo_flags_start */
7002 TODO_rebuild_alias /* todo_flags_finish */
7003 }
7004 };
7005
7006
7007 /* Return true if we should execute IPA PTA. */
7008 static bool
7009 gate_ipa_pta (void)
7010 {
7011 return (optimize
7012 && flag_ipa_pta
7013 /* Don't bother doing anything if the program has errors. */
7014 && !seen_error ());
7015 }
7016
7017 /* IPA PTA solutions for ESCAPED.  Filled in by ipa_pta_execute.
   NOTE(review): positional initializer — this assumes the first flag
   field of struct pt_solution is the "anything" flag; verify against
   the struct definition before changing.  */
7018 struct pt_solution ipa_escaped_pt
7019 = { true, false, false, false, false, false, NULL };
7020
7021 /* Associate node with varinfo DATA. Worker for
7022 cgraph_for_node_and_aliases. */
7023 static bool
7024 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7025 {
7026 if ((node->symbol.alias || node->thunk.thunk_p)
7027 && node->symbol.analyzed)
7028 insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
7029 return false;
7030 }
7031
7032 /* Execute the driver for IPA PTA.  Creates function and global
   variable infos for the whole unit, generates constraints for every
   function body, solves them once, and then distributes the solutions
   to SSA names and call use/clobber sets in every function.
   Returns 0 (no further TODO work).  */
7033 static unsigned int
7034 ipa_pta_execute (void)
7035 {
7036 struct cgraph_node *node;
7037 struct varpool_node *var;
7038 int from;
7039
7040 in_ipa_mode = 1;
7041
7042 init_alias_vars ();
7043
7044 if (dump_file && (dump_flags & TDF_DETAILS))
7045 {
7046 dump_symtab (dump_file);
7047 fprintf (dump_file, "\n");
7048 }
7049
7050 /* Build the constraints. */
7051 FOR_EACH_DEFINED_FUNCTION (node)
7052 {
7053 varinfo_t vi;
7054 /* Nodes without a body are not interesting. Especially do not
7055 visit clones at this point for now - we get duplicate decls
7056 there for inline clones at least. */
7057 if (!cgraph_function_with_gimple_body_p (node))
7058 continue;
7059
7060 gcc_assert (!node->clone_of);
7061
7062 vi = create_function_info_for (node->symbol.decl,
7063 alias_get_name (node->symbol.decl));
7064 cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
7065 }
7066
7067 /* Create constraints for global variables and their initializers. */
7068 FOR_EACH_VARIABLE (var)
7069 {
7070 if (var->symbol.alias && var->symbol.analyzed)
7071 continue;
7072
7073 get_vi_for_tree (var->symbol.decl);
7074 }
7075
7076 if (dump_file)
7077 {
7078 fprintf (dump_file,
7079 "Generating constraints for global initializers\n\n");
7080 dump_constraints (dump_file, 0);
7081 fprintf (dump_file, "\n");
7082 }
     /* Remember how many constraints the globals produced so the
	per-function dumps below only show the new ones.  */
7083 from = constraints.length ();
7084
7085 FOR_EACH_DEFINED_FUNCTION (node)
7086 {
7087 struct function *func;
7088 basic_block bb;
7089
7090 /* Nodes without a body are not interesting. */
7091 if (!cgraph_function_with_gimple_body_p (node))
7092 continue;
7093
7094 if (dump_file)
7095 {
7096 fprintf (dump_file,
7097 "Generating constraints for %s", cgraph_node_name (node));
7098 if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
7099 fprintf (dump_file, " (%s)",
7100 IDENTIFIER_POINTER
7101 (DECL_ASSEMBLER_NAME (node->symbol.decl)));
7102 fprintf (dump_file, "\n");
7103 }
7104
7105 func = DECL_STRUCT_FUNCTION (node->symbol.decl);
7106 push_cfun (func);
7107
7108 /* For externally visible or attribute used annotated functions use
7109 local constraints for their arguments.
7110 For local functions we see all callers and thus do not need initial
7111 constraints for parameters. */
7112 if (node->symbol.used_from_other_partition
7113 || node->symbol.externally_visible
7114 || node->symbol.force_output)
7115 {
7116 intra_create_variable_infos ();
7117
7118 /* We also need to make function return values escape. Nothing
7119 escapes by returning from main though. */
7120 if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
7121 {
7122 varinfo_t fi, rvi;
7123 fi = lookup_vi_for_tree (node->symbol.decl);
7124 rvi = first_vi_for_offset (fi, fi_result);
7125 if (rvi && rvi->offset == fi_result)
7126 {
7127 struct constraint_expr includes;
7128 struct constraint_expr var;
7129 includes.var = escaped_id;
7130 includes.offset = 0;
7131 includes.type = SCALAR;
7132 var.var = rvi->id;
7133 var.offset = 0;
7134 var.type = SCALAR;
7135 process_constraint (new_constraint (includes, var));
7136 }
7137 }
7138 }
7139
7140 /* Build constraints for the function body. */
7141 FOR_EACH_BB_FN (bb, func)
7142 {
7143 gimple_stmt_iterator gsi;
7144
7145 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7146 gsi_next (&gsi))
7147 {
7148 gimple phi = gsi_stmt (gsi);
7149
7150 if (! virtual_operand_p (gimple_phi_result (phi)))
7151 find_func_aliases (phi);
7152 }
7153
7154 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7155 {
7156 gimple stmt = gsi_stmt (gsi);
7157
7158 find_func_aliases (stmt);
7159 find_func_clobbers (stmt);
7160 }
7161 }
7162
7163 pop_cfun ();
7164
7165 if (dump_file)
7166 {
7167 fprintf (dump_file, "\n");
7168 dump_constraints (dump_file, from);
7169 fprintf (dump_file, "\n");
7170 }
7171 from = constraints.length ();
7172 }
7173
7174 /* From the constraints compute the points-to sets. */
7175 solve_constraints ();
7176
7177 /* Compute the global points-to sets for ESCAPED.
7178 ??? Note that the computed escape set is not correct
7179 for the whole unit as we fail to consider graph edges to
7180 externally visible functions. */
7181 ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
7182
7183 /* Make sure the ESCAPED solution (which is used as placeholder in
7184 other solutions) does not reference itself. This simplifies
7185 points-to solution queries. */
7186 ipa_escaped_pt.ipa_escaped = 0;
7187
7188 /* Assign the points-to sets to the SSA names in the unit. */
7189 FOR_EACH_DEFINED_FUNCTION (node)
7190 {
7191 tree ptr;
7192 struct function *fn;
7193 unsigned i;
7194 varinfo_t fi;
7195 basic_block bb;
7196 struct pt_solution uses, clobbers;
7197 struct cgraph_edge *e;
7198
7199 /* Nodes without a body are not interesting. */
7200 if (!cgraph_function_with_gimple_body_p (node))
7201 continue;
7202
7203 fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
7204
7205 /* Compute the points-to sets for pointer SSA_NAMEs. */
7206 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7207 {
7208 if (ptr
7209 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7210 find_what_p_points_to (ptr);
7211 }
7212
7213 /* Compute the call-use and call-clobber sets for all direct calls. */
7214 fi = lookup_vi_for_tree (node->symbol.decl);
7215 gcc_assert (fi->is_fn_info);
7216 clobbers
7217 = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
7218 uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
7219 for (e = node->callers; e; e = e->next_caller)
7220 {
7221 if (!e->call_stmt)
7222 continue;
7223
7224 *gimple_call_clobber_set (e->call_stmt) = clobbers;
7225 *gimple_call_use_set (e->call_stmt) = uses;
7226 }
7227
7228 /* Compute the call-use and call-clobber sets for indirect calls
7229 and calls to external functions. */
7230 FOR_EACH_BB_FN (bb, fn)
7231 {
7232 gimple_stmt_iterator gsi;
7233
7234 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7235 {
7236 gimple stmt = gsi_stmt (gsi);
7237 struct pt_solution *pt;
7238 varinfo_t vi;
7239 tree decl;
7240
7241 if (!is_gimple_call (stmt))
7242 continue;
7243
7244 /* Handle direct calls to external functions. */
7245 decl = gimple_call_fndecl (stmt);
7246 if (decl
7247 && (!(fi = lookup_vi_for_tree (decl))
7248 || !fi->is_fn_info))
7249 {
7250 pt = gimple_call_use_set (stmt);
7251 if (gimple_call_flags (stmt) & ECF_CONST)
7252 memset (pt, 0, sizeof (struct pt_solution));
7253 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7254 {
7255 *pt = find_what_var_points_to (vi);
7256 /* Escaped (and thus nonlocal) variables are always
7257 implicitly used by calls. */
7258 /* ??? ESCAPED can be empty even though NONLOCAL
7259 always escaped. */
7260 pt->nonlocal = 1;
7261 pt->ipa_escaped = 1;
7262 }
7263 else
7264 {
7265 /* If there is nothing special about this call then
7266 we have made everything that is used also escape. */
7267 *pt = ipa_escaped_pt;
7268 pt->nonlocal = 1;
7269 }
7270
7271 pt = gimple_call_clobber_set (stmt);
7272 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7273 memset (pt, 0, sizeof (struct pt_solution));
7274 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7275 {
7276 *pt = find_what_var_points_to (vi);
7277 /* Escaped (and thus nonlocal) variables are always
7278 implicitly clobbered by calls. */
7279 /* ??? ESCAPED can be empty even though NONLOCAL
7280 always escaped. */
7281 pt->nonlocal = 1;
7282 pt->ipa_escaped = 1;
7283 }
7284 else
7285 {
7286 /* If there is nothing special about this call then
7287 we have made everything that is used also escape. */
7288 *pt = ipa_escaped_pt;
7289 pt->nonlocal = 1;
7290 }
7291 }
7292
7293 /* Handle indirect calls. */
7294 if (!decl
7295 && (fi = get_fi_for_callee (stmt)))
7296 {
7297 /* We need to accumulate all clobbers/uses of all possible
7298 callees. */
7299 fi = get_varinfo (find (fi->id));
7300 /* If we cannot constrain the set of functions we'll end up
7301 calling we end up using/clobbering everything. */
7302 if (bitmap_bit_p (fi->solution, anything_id)
7303 || bitmap_bit_p (fi->solution, nonlocal_id)
7304 || bitmap_bit_p (fi->solution, escaped_id))
7305 {
7306 pt_solution_reset (gimple_call_clobber_set (stmt));
7307 pt_solution_reset (gimple_call_use_set (stmt));
7308 }
7309 else
7310 {
7311 bitmap_iterator bi;
7312 unsigned i;
7313 struct pt_solution *uses, *clobbers;
7314
7315 uses = gimple_call_use_set (stmt);
7316 clobbers = gimple_call_clobber_set (stmt);
7317 memset (uses, 0, sizeof (struct pt_solution));
7318 memset (clobbers, 0, sizeof (struct pt_solution));
7319 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7320 {
7321 struct pt_solution sol;
7322
7323 vi = get_varinfo (i);
7324 if (!vi->is_fn_info)
7325 {
7326 /* ??? We could be more precise here? */
7327 uses->nonlocal = 1;
7328 uses->ipa_escaped = 1;
7329 clobbers->nonlocal = 1;
7330 clobbers->ipa_escaped = 1;
7331 continue;
7332 }
7333
7334 if (!uses->anything)
7335 {
7336 sol = find_what_var_points_to
7337 (first_vi_for_offset (vi, fi_uses));
7338 pt_solution_ior_into (uses, &sol);
7339 }
7340 if (!clobbers->anything)
7341 {
7342 sol = find_what_var_points_to
7343 (first_vi_for_offset (vi, fi_clobbers));
7344 pt_solution_ior_into (clobbers, &sol);
7345 }
7346 }
7347 }
7348 }
7349 }
7350 }
7351
     /* Flag the function so the local pass (compute_may_aliases) knows
	not to recompute and overwrite these solutions.  */
7352 fn->gimple_df->ipa_pta = true;
7353 }
7354
7355 delete_points_to_sets ();
7356
7357 in_ipa_mode = 0;
7358
7359 return 0;
7360 }
7361
/* The interprocedural points-to analysis pass, run once over the
   whole translation unit via ipa_pta_execute.  */
7362 struct simple_ipa_opt_pass pass_ipa_pta =
7363 {
7364 {
7365 SIMPLE_IPA_PASS,
7366 "pta", /* name */
7367 OPTGROUP_NONE, /* optinfo_flags */
7368 gate_ipa_pta, /* gate */
7369 ipa_pta_execute, /* execute */
7370 NULL, /* sub */
7371 NULL, /* next */
7372 0, /* static_pass_number */
7373 TV_IPA_PTA, /* tv_id */
7374 0, /* properties_required */
7375 0, /* properties_provided */
7376 0, /* properties_destroyed */
7377 0, /* todo_flags_start */
7378 TODO_update_ssa /* todo_flags_finish */
7379 }
7380 };